{ "best_metric": null, "best_model_checkpoint": null, "epoch": 2.251894240449319, "eval_steps": 3000, "global_step": 255000, "is_hyper_param_search": false, "is_local_process_zero": true, "is_world_process_zero": true, "log_history": [ { "epoch": 0.00044154789028418023, "grad_norm": 4.407685279846191, "learning_rate": 4.800000000000001e-06, "loss": 4.2565, "step": 50 }, { "epoch": 0.0008830957805683605, "grad_norm": 2.3940842151641846, "learning_rate": 9.800000000000001e-06, "loss": 4.3171, "step": 100 }, { "epoch": 0.0013246436708525408, "grad_norm": 2.555833339691162, "learning_rate": 1.48e-05, "loss": 3.8795, "step": 150 }, { "epoch": 0.001766191561136721, "grad_norm": 0.9911451935768127, "learning_rate": 1.97e-05, "loss": 3.9588, "step": 200 }, { "epoch": 0.0022077394514209013, "grad_norm": 1.1606439352035522, "learning_rate": 2.47e-05, "loss": 3.4062, "step": 250 }, { "epoch": 0.0026492873417050815, "grad_norm": 2.9307570457458496, "learning_rate": 2.97e-05, "loss": 3.5316, "step": 300 }, { "epoch": 0.0030908352319892617, "grad_norm": 2.6516165733337402, "learning_rate": 3.4699999999999996e-05, "loss": 3.5324, "step": 350 }, { "epoch": 0.003532383122273442, "grad_norm": 1.69759202003479, "learning_rate": 3.97e-05, "loss": 3.6864, "step": 400 }, { "epoch": 0.003973931012557622, "grad_norm": 3.0430002212524414, "learning_rate": 4.47e-05, "loss": 3.6768, "step": 450 }, { "epoch": 0.004415478902841803, "grad_norm": 4.26913595199585, "learning_rate": 4.97e-05, "loss": 3.9357, "step": 500 }, { "epoch": 0.004857026793125983, "grad_norm": 1.9752435684204102, "learning_rate": 5.470000000000001e-05, "loss": 3.4989, "step": 550 }, { "epoch": 0.005298574683410163, "grad_norm": 2.631711006164551, "learning_rate": 5.96e-05, "loss": 3.4577, "step": 600 }, { "epoch": 0.005740122573694343, "grad_norm": 4.840415000915527, "learning_rate": 6.460000000000001e-05, "loss": 3.6496, "step": 650 }, { "epoch": 0.006181670463978523, "grad_norm": 0.7839917540550232, "learning_rate": 
6.96e-05, "loss": 3.4212, "step": 700 }, { "epoch": 0.0066232183542627035, "grad_norm": 2.6035537719726562, "learning_rate": 7.46e-05, "loss": 3.5461, "step": 750 }, { "epoch": 0.007064766244546884, "grad_norm": 2.7785916328430176, "learning_rate": 7.960000000000001e-05, "loss": 3.5111, "step": 800 }, { "epoch": 0.007506314134831064, "grad_norm": 3.2137913703918457, "learning_rate": 8.46e-05, "loss": 3.4488, "step": 850 }, { "epoch": 0.007947862025115243, "grad_norm": 2.4404215812683105, "learning_rate": 8.960000000000001e-05, "loss": 3.6674, "step": 900 }, { "epoch": 0.008389409915399423, "grad_norm": 2.055772542953491, "learning_rate": 9.46e-05, "loss": 3.5125, "step": 950 }, { "epoch": 0.008830957805683605, "grad_norm": 6.80244255065918, "learning_rate": 9.960000000000001e-05, "loss": 3.7395, "step": 1000 }, { "epoch": 0.009272505695967785, "grad_norm": 1.6064447164535522, "learning_rate": 9.999999836556943e-05, "loss": 3.2688, "step": 1050 }, { "epoch": 0.009714053586251966, "grad_norm": 1.4385908842086792, "learning_rate": 9.999999288142163e-05, "loss": 3.7015, "step": 1100 }, { "epoch": 0.010155601476536146, "grad_norm": 2.0870840549468994, "learning_rate": 9.999998353519836e-05, "loss": 3.6805, "step": 1150 }, { "epoch": 0.010597149366820326, "grad_norm": 1.5181084871292114, "learning_rate": 9.999997032690033e-05, "loss": 3.9943, "step": 1200 }, { "epoch": 0.011038697257104506, "grad_norm": 2.0641415119171143, "learning_rate": 9.99999532565286e-05, "loss": 3.4652, "step": 1250 }, { "epoch": 0.011480245147388686, "grad_norm": 4.051513671875, "learning_rate": 9.999993232408446e-05, "loss": 3.784, "step": 1300 }, { "epoch": 0.011921793037672867, "grad_norm": 2.475578546524048, "learning_rate": 9.999990752956954e-05, "loss": 3.6213, "step": 1350 }, { "epoch": 0.012363340927957047, "grad_norm": 2.650278091430664, "learning_rate": 9.999987887298574e-05, "loss": 3.2759, "step": 1400 }, { "epoch": 0.012804888818241227, "grad_norm": 1.043623924255371, 
"learning_rate": 9.999984635433529e-05, "loss": 3.4098, "step": 1450 }, { "epoch": 0.013246436708525407, "grad_norm": 2.3483691215515137, "learning_rate": 9.999980997362069e-05, "loss": 3.5984, "step": 1500 }, { "epoch": 0.013687984598809587, "grad_norm": 1.8058457374572754, "learning_rate": 9.999976973084477e-05, "loss": 3.5686, "step": 1550 }, { "epoch": 0.014129532489093767, "grad_norm": 1.9905356168746948, "learning_rate": 9.999972562601062e-05, "loss": 3.5255, "step": 1600 }, { "epoch": 0.014571080379377948, "grad_norm": 8.096346855163574, "learning_rate": 9.999967765912164e-05, "loss": 3.8234, "step": 1650 }, { "epoch": 0.015012628269662128, "grad_norm": 3.1411712169647217, "learning_rate": 9.999962583018154e-05, "loss": 3.3328, "step": 1700 }, { "epoch": 0.015454176159946308, "grad_norm": 4.46273946762085, "learning_rate": 9.999957013919435e-05, "loss": 3.2951, "step": 1750 }, { "epoch": 0.015895724050230486, "grad_norm": 1.3609850406646729, "learning_rate": 9.999951058616435e-05, "loss": 3.3173, "step": 1800 }, { "epoch": 0.016337271940514667, "grad_norm": 0.8974264860153198, "learning_rate": 9.999944717109613e-05, "loss": 3.5905, "step": 1850 }, { "epoch": 0.016778819830798847, "grad_norm": 4.310591220855713, "learning_rate": 9.999937989399459e-05, "loss": 3.2723, "step": 1900 }, { "epoch": 0.017220367721083027, "grad_norm": 10.745691299438477, "learning_rate": 9.999930875486495e-05, "loss": 3.3104, "step": 1950 }, { "epoch": 0.01766191561136721, "grad_norm": 1.678096890449524, "learning_rate": 9.999923375371269e-05, "loss": 3.4128, "step": 2000 }, { "epoch": 0.01810346350165139, "grad_norm": 1.2710167169570923, "learning_rate": 9.99991548905436e-05, "loss": 3.8629, "step": 2050 }, { "epoch": 0.01854501139193557, "grad_norm": 1.6546989679336548, "learning_rate": 9.999907216536378e-05, "loss": 3.9797, "step": 2100 }, { "epoch": 0.01898655928221975, "grad_norm": 2.4010069370269775, "learning_rate": 9.999898557817962e-05, "loss": 3.4582, "step": 2150 }, { 
"epoch": 0.01942810717250393, "grad_norm": 11.584942817687988, "learning_rate": 9.999889512899778e-05, "loss": 3.4392, "step": 2200 }, { "epoch": 0.01986965506278811, "grad_norm": 2.588017702102661, "learning_rate": 9.999880081782529e-05, "loss": 3.2102, "step": 2250 }, { "epoch": 0.02031120295307229, "grad_norm": 4.959451198577881, "learning_rate": 9.99987026446694e-05, "loss": 3.4403, "step": 2300 }, { "epoch": 0.020752750843356472, "grad_norm": 2.886789560317993, "learning_rate": 9.999860060953772e-05, "loss": 3.3001, "step": 2350 }, { "epoch": 0.021194298733640652, "grad_norm": 7.682253837585449, "learning_rate": 9.999849471243812e-05, "loss": 3.8255, "step": 2400 }, { "epoch": 0.021635846623924832, "grad_norm": 9.694388389587402, "learning_rate": 9.999838495337877e-05, "loss": 3.8851, "step": 2450 }, { "epoch": 0.022077394514209012, "grad_norm": 2.9934961795806885, "learning_rate": 9.999827133236815e-05, "loss": 3.6424, "step": 2500 }, { "epoch": 0.022518942404493193, "grad_norm": 1.4004652500152588, "learning_rate": 9.999815384941506e-05, "loss": 3.5799, "step": 2550 }, { "epoch": 0.022960490294777373, "grad_norm": 2.06247615814209, "learning_rate": 9.999803250452856e-05, "loss": 3.4358, "step": 2600 }, { "epoch": 0.023402038185061553, "grad_norm": 2.051114082336426, "learning_rate": 9.999790729771798e-05, "loss": 3.6721, "step": 2650 }, { "epoch": 0.023843586075345733, "grad_norm": 2.4515838623046875, "learning_rate": 9.999777822899307e-05, "loss": 3.6419, "step": 2700 }, { "epoch": 0.024285133965629913, "grad_norm": 2.6057190895080566, "learning_rate": 9.999764529836375e-05, "loss": 3.4357, "step": 2750 }, { "epoch": 0.024726681855914093, "grad_norm": 2.1552577018737793, "learning_rate": 9.99975085058403e-05, "loss": 3.6189, "step": 2800 }, { "epoch": 0.025168229746198274, "grad_norm": 4.0472846031188965, "learning_rate": 9.999736785143327e-05, "loss": 3.5135, "step": 2850 }, { "epoch": 0.025609777636482454, "grad_norm": 1.694504976272583, "learning_rate": 
9.999722333515355e-05, "loss": 3.5287, "step": 2900 }, { "epoch": 0.026051325526766634, "grad_norm": 8.814433097839355, "learning_rate": 9.99970749570123e-05, "loss": 3.4025, "step": 2950 }, { "epoch": 0.026492873417050814, "grad_norm": 2.079963207244873, "learning_rate": 9.999692579966685e-05, "loss": 3.8211, "step": 3000 }, { "epoch": 0.026492873417050814, "eval_asr_loss": 0.8104195239432453, "eval_loss": 3.4588043689727783, "eval_runtime": 20.2586, "eval_samples_per_second": 37.91, "eval_steps_per_second": 9.477, "eval_tts_loss": 6.061187094347814, "step": 3000 }, { "epoch": 0.026934421307334994, "grad_norm": 2.6620876789093018, "learning_rate": 9.999676977507384e-05, "loss": 3.4284, "step": 3050 }, { "epoch": 0.027375969197619174, "grad_norm": 10.126931190490723, "learning_rate": 9.999660988865433e-05, "loss": 3.756, "step": 3100 }, { "epoch": 0.027817517087903355, "grad_norm": 1.450592279434204, "learning_rate": 9.999644614042068e-05, "loss": 3.2147, "step": 3150 }, { "epoch": 0.028259064978187535, "grad_norm": 1.703476071357727, "learning_rate": 9.99962785303855e-05, "loss": 3.5119, "step": 3200 }, { "epoch": 0.028700612868471715, "grad_norm": 2.767446756362915, "learning_rate": 9.999610705856177e-05, "loss": 3.2736, "step": 3250 }, { "epoch": 0.029142160758755895, "grad_norm": 1.1504226922988892, "learning_rate": 9.999593172496273e-05, "loss": 3.4068, "step": 3300 }, { "epoch": 0.029583708649040075, "grad_norm": 1.6964422464370728, "learning_rate": 9.99957525296019e-05, "loss": 3.4917, "step": 3350 }, { "epoch": 0.030025256539324256, "grad_norm": 1.8230127096176147, "learning_rate": 9.999556947249316e-05, "loss": 3.1574, "step": 3400 }, { "epoch": 0.030466804429608436, "grad_norm": 3.8012404441833496, "learning_rate": 9.999538255365062e-05, "loss": 3.6179, "step": 3450 }, { "epoch": 0.030908352319892616, "grad_norm": 3.4073257446289062, "learning_rate": 9.999519177308871e-05, "loss": 3.6502, "step": 3500 }, { "epoch": 0.03134990021017679, "grad_norm": 
1.7333426475524902, "learning_rate": 9.99949971308222e-05, "loss": 3.5175, "step": 3550 }, { "epoch": 0.03179144810046097, "grad_norm": 1.5845162868499756, "learning_rate": 9.99947986268661e-05, "loss": 3.4373, "step": 3600 }, { "epoch": 0.03223299599074515, "grad_norm": 4.4948225021362305, "learning_rate": 9.999459626123576e-05, "loss": 3.6571, "step": 3650 }, { "epoch": 0.03267454388102933, "grad_norm": 3.1738150119781494, "learning_rate": 9.99943900339468e-05, "loss": 3.6251, "step": 3700 }, { "epoch": 0.03311609177131351, "grad_norm": 2.0190846920013428, "learning_rate": 9.999417994501514e-05, "loss": 3.5417, "step": 3750 }, { "epoch": 0.033557639661597694, "grad_norm": 3.240422248840332, "learning_rate": 9.999396599445701e-05, "loss": 3.5891, "step": 3800 }, { "epoch": 0.033999187551881874, "grad_norm": 5.438379287719727, "learning_rate": 9.999374818228895e-05, "loss": 3.6557, "step": 3850 }, { "epoch": 0.034440735442166054, "grad_norm": 1.6964021921157837, "learning_rate": 9.999352650852778e-05, "loss": 3.4464, "step": 3900 }, { "epoch": 0.03488228333245024, "grad_norm": 1.759133219718933, "learning_rate": 9.999330097319063e-05, "loss": 3.5975, "step": 3950 }, { "epoch": 0.03532383122273442, "grad_norm": 1.7858753204345703, "learning_rate": 9.999307157629489e-05, "loss": 3.8846, "step": 4000 }, { "epoch": 0.0357653791130186, "grad_norm": 1.641852855682373, "learning_rate": 9.999283831785833e-05, "loss": 3.7919, "step": 4050 }, { "epoch": 0.03620692700330278, "grad_norm": 1.8461246490478516, "learning_rate": 9.99926011978989e-05, "loss": 3.2686, "step": 4100 }, { "epoch": 0.03664847489358696, "grad_norm": 2.8383119106292725, "learning_rate": 9.999236021643498e-05, "loss": 3.1711, "step": 4150 }, { "epoch": 0.03709002278387114, "grad_norm": 1.094404935836792, "learning_rate": 9.999211537348514e-05, "loss": 3.4461, "step": 4200 }, { "epoch": 0.03753157067415532, "grad_norm": 1.1627689599990845, "learning_rate": 9.999186666906832e-05, "loss": 3.2783, "step": 4250 
}, { "epoch": 0.0379731185644395, "grad_norm": 2.673081398010254, "learning_rate": 9.999161410320373e-05, "loss": 3.3066, "step": 4300 }, { "epoch": 0.03841466645472368, "grad_norm": 2.3519914150238037, "learning_rate": 9.999135767591083e-05, "loss": 3.5882, "step": 4350 }, { "epoch": 0.03885621434500786, "grad_norm": 2.861649990081787, "learning_rate": 9.999109738720949e-05, "loss": 3.3404, "step": 4400 }, { "epoch": 0.03929776223529204, "grad_norm": 2.392594575881958, "learning_rate": 9.999083323711979e-05, "loss": 3.4555, "step": 4450 }, { "epoch": 0.03973931012557622, "grad_norm": 2.1783764362335205, "learning_rate": 9.999056522566213e-05, "loss": 3.3032, "step": 4500 }, { "epoch": 0.0401808580158604, "grad_norm": 3.0805599689483643, "learning_rate": 9.99902933528572e-05, "loss": 3.7245, "step": 4550 }, { "epoch": 0.04062240590614458, "grad_norm": 0.8756254315376282, "learning_rate": 9.999001761872601e-05, "loss": 3.0947, "step": 4600 }, { "epoch": 0.041063953796428763, "grad_norm": 8.237518310546875, "learning_rate": 9.998973802328988e-05, "loss": 3.369, "step": 4650 }, { "epoch": 0.041505501686712944, "grad_norm": 1.5842547416687012, "learning_rate": 9.998945456657039e-05, "loss": 3.6075, "step": 4700 }, { "epoch": 0.041947049576997124, "grad_norm": 2.602689504623413, "learning_rate": 9.998916724858942e-05, "loss": 3.7514, "step": 4750 }, { "epoch": 0.042388597467281304, "grad_norm": 3.264051914215088, "learning_rate": 9.998887606936918e-05, "loss": 3.435, "step": 4800 }, { "epoch": 0.042830145357565484, "grad_norm": 2.2757341861724854, "learning_rate": 9.998858102893215e-05, "loss": 3.6671, "step": 4850 }, { "epoch": 0.043271693247849664, "grad_norm": 1.5097368955612183, "learning_rate": 9.998828212730115e-05, "loss": 3.4472, "step": 4900 }, { "epoch": 0.043713241138133845, "grad_norm": 2.7084147930145264, "learning_rate": 9.998797936449922e-05, "loss": 3.8243, "step": 4950 }, { "epoch": 0.044154789028418025, "grad_norm": 2.9833407402038574, "learning_rate": 
9.998767274054978e-05, "loss": 3.7378, "step": 5000 }, { "epoch": 0.044596336918702205, "grad_norm": 9.48874568939209, "learning_rate": 9.998736225547651e-05, "loss": 3.4267, "step": 5050 }, { "epoch": 0.045037884808986385, "grad_norm": 1.7265108823776245, "learning_rate": 9.998704790930337e-05, "loss": 3.4905, "step": 5100 }, { "epoch": 0.045479432699270565, "grad_norm": 1.690320372581482, "learning_rate": 9.998672970205467e-05, "loss": 3.4667, "step": 5150 }, { "epoch": 0.045920980589554745, "grad_norm": 3.318742275238037, "learning_rate": 9.998640763375497e-05, "loss": 3.4528, "step": 5200 }, { "epoch": 0.046362528479838926, "grad_norm": 3.2868552207946777, "learning_rate": 9.998608170442915e-05, "loss": 3.5877, "step": 5250 }, { "epoch": 0.046804076370123106, "grad_norm": 3.8400142192840576, "learning_rate": 9.99857519141024e-05, "loss": 3.5318, "step": 5300 }, { "epoch": 0.047245624260407286, "grad_norm": 1.9866262674331665, "learning_rate": 9.998541826280018e-05, "loss": 3.7204, "step": 5350 }, { "epoch": 0.047687172150691466, "grad_norm": 2.3364500999450684, "learning_rate": 9.998508075054826e-05, "loss": 3.3647, "step": 5400 }, { "epoch": 0.048128720040975646, "grad_norm": 3.461249589920044, "learning_rate": 9.99847393773727e-05, "loss": 3.4072, "step": 5450 }, { "epoch": 0.048570267931259826, "grad_norm": 1.401358962059021, "learning_rate": 9.99843941432999e-05, "loss": 3.506, "step": 5500 }, { "epoch": 0.04901181582154401, "grad_norm": 2.5606305599212646, "learning_rate": 9.99840450483565e-05, "loss": 3.3622, "step": 5550 }, { "epoch": 0.04945336371182819, "grad_norm": 2.909435510635376, "learning_rate": 9.998369209256947e-05, "loss": 3.1674, "step": 5600 }, { "epoch": 0.04989491160211237, "grad_norm": 1.5808318853378296, "learning_rate": 9.998333527596607e-05, "loss": 3.5161, "step": 5650 }, { "epoch": 0.05033645949239655, "grad_norm": 1.6047215461730957, "learning_rate": 9.998297459857387e-05, "loss": 3.528, "step": 5700 }, { "epoch": 
0.05077800738268073, "grad_norm": 2.816723346710205, "learning_rate": 9.998261006042074e-05, "loss": 3.1358, "step": 5750 }, { "epoch": 0.05121955527296491, "grad_norm": 1.9394768476486206, "learning_rate": 9.99822416615348e-05, "loss": 2.8468, "step": 5800 }, { "epoch": 0.05166110316324909, "grad_norm": 7.701488971710205, "learning_rate": 9.998186940194454e-05, "loss": 3.5079, "step": 5850 }, { "epoch": 0.05210265105353327, "grad_norm": 1.9361647367477417, "learning_rate": 9.998150084191847e-05, "loss": 3.1601, "step": 5900 }, { "epoch": 0.05254419894381745, "grad_norm": 1.7290771007537842, "learning_rate": 9.998112093821874e-05, "loss": 3.5756, "step": 5950 }, { "epoch": 0.05298574683410163, "grad_norm": 7.283807754516602, "learning_rate": 9.998073717390126e-05, "loss": 3.3956, "step": 6000 }, { "epoch": 0.05298574683410163, "eval_asr_loss": 0.8455491743058053, "eval_loss": 3.3733489513397217, "eval_runtime": 20.0292, "eval_samples_per_second": 38.344, "eval_steps_per_second": 9.586, "eval_tts_loss": 5.977993885297463, "step": 6000 }, { "epoch": 0.05342729472438581, "grad_norm": 2.8105592727661133, "learning_rate": 9.998034954899564e-05, "loss": 3.5391, "step": 6050 }, { "epoch": 0.05386884261466999, "grad_norm": 2.575618267059326, "learning_rate": 9.997995806353187e-05, "loss": 3.4848, "step": 6100 }, { "epoch": 0.05431039050495417, "grad_norm": 1.944770336151123, "learning_rate": 9.997956271754013e-05, "loss": 3.4319, "step": 6150 }, { "epoch": 0.05475193839523835, "grad_norm": 2.9254565238952637, "learning_rate": 9.997916351105098e-05, "loss": 3.5138, "step": 6200 }, { "epoch": 0.05519348628552253, "grad_norm": 2.876880645751953, "learning_rate": 9.997876044409529e-05, "loss": 3.2436, "step": 6250 }, { "epoch": 0.05563503417580671, "grad_norm": 1.5080561637878418, "learning_rate": 9.997835351670415e-05, "loss": 3.3394, "step": 6300 }, { "epoch": 0.05607658206609089, "grad_norm": 2.99318790435791, "learning_rate": 9.9977942728909e-05, "loss": 3.2061, "step": 
6350 }, { "epoch": 0.05651812995637507, "grad_norm": 2.1895508766174316, "learning_rate": 9.997752808074157e-05, "loss": 3.586, "step": 6400 }, { "epoch": 0.05695967784665925, "grad_norm": 2.089017868041992, "learning_rate": 9.997710957223389e-05, "loss": 3.6418, "step": 6450 }, { "epoch": 0.05740122573694343, "grad_norm": 1.3540375232696533, "learning_rate": 9.997668720341831e-05, "loss": 3.5735, "step": 6500 }, { "epoch": 0.05784277362722761, "grad_norm": 2.3487038612365723, "learning_rate": 9.997626097432741e-05, "loss": 3.4285, "step": 6550 }, { "epoch": 0.05828432151751179, "grad_norm": 2.5517020225524902, "learning_rate": 9.997583088499415e-05, "loss": 3.5823, "step": 6600 }, { "epoch": 0.05872586940779597, "grad_norm": 1.7537412643432617, "learning_rate": 9.997539693545174e-05, "loss": 3.1481, "step": 6650 }, { "epoch": 0.05916741729808015, "grad_norm": 2.911041736602783, "learning_rate": 9.99749591257337e-05, "loss": 3.7261, "step": 6700 }, { "epoch": 0.05960896518836433, "grad_norm": 7.761764049530029, "learning_rate": 9.997451745587382e-05, "loss": 3.8374, "step": 6750 }, { "epoch": 0.06005051307864851, "grad_norm": 6.103822708129883, "learning_rate": 9.997407192590625e-05, "loss": 3.1474, "step": 6800 }, { "epoch": 0.06049206096893269, "grad_norm": 8.443099021911621, "learning_rate": 9.99736225358654e-05, "loss": 3.4926, "step": 6850 }, { "epoch": 0.06093360885921687, "grad_norm": 6.040070056915283, "learning_rate": 9.997316928578595e-05, "loss": 3.1745, "step": 6900 }, { "epoch": 0.06137515674950105, "grad_norm": 1.7983872890472412, "learning_rate": 9.997271217570296e-05, "loss": 3.5779, "step": 6950 }, { "epoch": 0.06181670463978523, "grad_norm": 2.881488800048828, "learning_rate": 9.99722512056517e-05, "loss": 3.6113, "step": 7000 }, { "epoch": 0.06225825253006941, "grad_norm": 2.0535004138946533, "learning_rate": 9.997178637566777e-05, "loss": 3.5762, "step": 7050 }, { "epoch": 0.06269980042035359, "grad_norm": 2.7159841060638428, "learning_rate": 
9.997131768578711e-05, "loss": 3.4012, "step": 7100 }, { "epoch": 0.06314134831063777, "grad_norm": 3.324490785598755, "learning_rate": 9.997084513604591e-05, "loss": 3.8183, "step": 7150 }, { "epoch": 0.06358289620092195, "grad_norm": 1.6304800510406494, "learning_rate": 9.997036872648064e-05, "loss": 3.4161, "step": 7200 }, { "epoch": 0.06402444409120613, "grad_norm": 2.0595145225524902, "learning_rate": 9.996988845712813e-05, "loss": 3.4944, "step": 7250 }, { "epoch": 0.0644659919814903, "grad_norm": 2.9137229919433594, "learning_rate": 9.996940432802548e-05, "loss": 3.6446, "step": 7300 }, { "epoch": 0.06490753987177449, "grad_norm": 2.6019785404205322, "learning_rate": 9.996891633921007e-05, "loss": 3.5755, "step": 7350 }, { "epoch": 0.06534908776205867, "grad_norm": 1.3080384731292725, "learning_rate": 9.99684244907196e-05, "loss": 3.432, "step": 7400 }, { "epoch": 0.06579063565234285, "grad_norm": 1.085084080696106, "learning_rate": 9.996792878259204e-05, "loss": 3.5939, "step": 7450 }, { "epoch": 0.06623218354262703, "grad_norm": 2.416012763977051, "learning_rate": 9.996742921486573e-05, "loss": 3.3125, "step": 7500 }, { "epoch": 0.0666737314329112, "grad_norm": 2.329331159591675, "learning_rate": 9.99669257875792e-05, "loss": 3.4727, "step": 7550 }, { "epoch": 0.06711527932319539, "grad_norm": 1.2133623361587524, "learning_rate": 9.996641850077135e-05, "loss": 3.6817, "step": 7600 }, { "epoch": 0.06755682721347957, "grad_norm": 3.447230577468872, "learning_rate": 9.99659073544814e-05, "loss": 3.3487, "step": 7650 }, { "epoch": 0.06799837510376375, "grad_norm": 1.8407869338989258, "learning_rate": 9.99653923487488e-05, "loss": 3.4952, "step": 7700 }, { "epoch": 0.06843992299404793, "grad_norm": 3.4017183780670166, "learning_rate": 9.996487348361331e-05, "loss": 3.5987, "step": 7750 }, { "epoch": 0.06888147088433211, "grad_norm": 2.014235734939575, "learning_rate": 9.996435075911507e-05, "loss": 3.4913, "step": 7800 }, { "epoch": 0.0693230187746163, 
"grad_norm": 8.113083839416504, "learning_rate": 9.99638241752944e-05, "loss": 3.2701, "step": 7850 }, { "epoch": 0.06976456666490048, "grad_norm": 3.6530191898345947, "learning_rate": 9.996329373219199e-05, "loss": 3.7682, "step": 7900 }, { "epoch": 0.07020611455518466, "grad_norm": 3.110872507095337, "learning_rate": 9.996275942984882e-05, "loss": 2.9624, "step": 7950 }, { "epoch": 0.07064766244546884, "grad_norm": 5.682243347167969, "learning_rate": 9.996222126830616e-05, "loss": 3.1075, "step": 8000 }, { "epoch": 0.07108921033575302, "grad_norm": 8.339673042297363, "learning_rate": 9.996167924760556e-05, "loss": 3.4832, "step": 8050 }, { "epoch": 0.0715307582260372, "grad_norm": 2.9629604816436768, "learning_rate": 9.996113336778891e-05, "loss": 3.5399, "step": 8100 }, { "epoch": 0.07197230611632138, "grad_norm": 4.072164058685303, "learning_rate": 9.996058362889837e-05, "loss": 3.3337, "step": 8150 }, { "epoch": 0.07241385400660556, "grad_norm": 2.1256349086761475, "learning_rate": 9.99600300309764e-05, "loss": 3.4296, "step": 8200 }, { "epoch": 0.07285540189688974, "grad_norm": 0.9664581418037415, "learning_rate": 9.995947257406575e-05, "loss": 3.3867, "step": 8250 }, { "epoch": 0.07329694978717392, "grad_norm": 2.7981796264648438, "learning_rate": 9.995891125820948e-05, "loss": 3.5777, "step": 8300 }, { "epoch": 0.0737384976774581, "grad_norm": 2.2892305850982666, "learning_rate": 9.995834608345098e-05, "loss": 3.448, "step": 8350 }, { "epoch": 0.07418004556774228, "grad_norm": 3.462890386581421, "learning_rate": 9.995777704983387e-05, "loss": 3.6835, "step": 8400 }, { "epoch": 0.07462159345802646, "grad_norm": 2.887977123260498, "learning_rate": 9.995720415740209e-05, "loss": 3.8102, "step": 8450 }, { "epoch": 0.07506314134831064, "grad_norm": 1.103590726852417, "learning_rate": 9.995662740619993e-05, "loss": 3.3465, "step": 8500 }, { "epoch": 0.07550468923859482, "grad_norm": 1.5264739990234375, "learning_rate": 9.995604679627193e-05, "loss": 3.3524, 
"step": 8550 }, { "epoch": 0.075946237128879, "grad_norm": 10.508204460144043, "learning_rate": 9.995546232766293e-05, "loss": 2.9848, "step": 8600 }, { "epoch": 0.07638778501916318, "grad_norm": 11.036320686340332, "learning_rate": 9.995487400041806e-05, "loss": 3.6104, "step": 8650 }, { "epoch": 0.07682933290944736, "grad_norm": 0.900551974773407, "learning_rate": 9.995428181458279e-05, "loss": 3.2115, "step": 8700 }, { "epoch": 0.07727088079973154, "grad_norm": 1.0776323080062866, "learning_rate": 9.995368577020285e-05, "loss": 3.378, "step": 8750 }, { "epoch": 0.07771242869001573, "grad_norm": 1.9495457410812378, "learning_rate": 9.995308586732427e-05, "loss": 3.134, "step": 8800 }, { "epoch": 0.0781539765802999, "grad_norm": 2.638998508453369, "learning_rate": 9.99524821059934e-05, "loss": 3.158, "step": 8850 }, { "epoch": 0.07859552447058409, "grad_norm": 1.9697933197021484, "learning_rate": 9.995187448625688e-05, "loss": 3.4271, "step": 8900 }, { "epoch": 0.07903707236086827, "grad_norm": 1.1985995769500732, "learning_rate": 9.995126300816163e-05, "loss": 3.3212, "step": 8950 }, { "epoch": 0.07947862025115245, "grad_norm": 5.711977005004883, "learning_rate": 9.995064767175488e-05, "loss": 3.256, "step": 9000 }, { "epoch": 0.07947862025115245, "eval_asr_loss": 0.8664082059474173, "eval_loss": 3.2935588359832764, "eval_runtime": 19.9624, "eval_samples_per_second": 38.472, "eval_steps_per_second": 9.618, "eval_tts_loss": 5.950566485041761, "step": 9000 }, { "epoch": 0.07992016814143663, "grad_norm": 2.0665061473846436, "learning_rate": 9.995002847708418e-05, "loss": 3.5921, "step": 9050 }, { "epoch": 0.0803617160317208, "grad_norm": 2.0567665100097656, "learning_rate": 9.994940542419733e-05, "loss": 3.4312, "step": 9100 }, { "epoch": 0.08080326392200499, "grad_norm": 5.3886847496032715, "learning_rate": 9.994879108917332e-05, "loss": 3.2038, "step": 9150 }, { "epoch": 0.08124481181228917, "grad_norm": 5.97287130355835, "learning_rate": 9.994816039716079e-05, 
"loss": 3.4363, "step": 9200 }, { "epoch": 0.08168635970257335, "grad_norm": 3.153740406036377, "learning_rate": 9.994752584707642e-05, "loss": 3.3041, "step": 9250 }, { "epoch": 0.08212790759285753, "grad_norm": 2.116394519805908, "learning_rate": 9.994688743896923e-05, "loss": 3.906, "step": 9300 }, { "epoch": 0.08256945548314171, "grad_norm": 2.2864694595336914, "learning_rate": 9.994624517288851e-05, "loss": 3.5515, "step": 9350 }, { "epoch": 0.08301100337342589, "grad_norm": 6.8061723709106445, "learning_rate": 9.994559904888388e-05, "loss": 3.5991, "step": 9400 }, { "epoch": 0.08345255126371007, "grad_norm": 5.035069465637207, "learning_rate": 9.994494906700527e-05, "loss": 3.7135, "step": 9450 }, { "epoch": 0.08389409915399425, "grad_norm": 2.2650699615478516, "learning_rate": 9.994429522730284e-05, "loss": 3.3555, "step": 9500 }, { "epoch": 0.08433564704427843, "grad_norm": 3.1491940021514893, "learning_rate": 9.994363752982714e-05, "loss": 3.0579, "step": 9550 }, { "epoch": 0.08477719493456261, "grad_norm": 3.5358498096466064, "learning_rate": 9.994297597462894e-05, "loss": 3.6277, "step": 9600 }, { "epoch": 0.08521874282484679, "grad_norm": 3.785895824432373, "learning_rate": 9.994231056175936e-05, "loss": 3.2452, "step": 9650 }, { "epoch": 0.08566029071513097, "grad_norm": 1.632702350616455, "learning_rate": 9.994164129126977e-05, "loss": 3.1927, "step": 9700 }, { "epoch": 0.08610183860541515, "grad_norm": 3.5743367671966553, "learning_rate": 9.99409681632119e-05, "loss": 3.6792, "step": 9750 }, { "epoch": 0.08654338649569933, "grad_norm": 9.39389419555664, "learning_rate": 9.994029117763773e-05, "loss": 3.1351, "step": 9800 }, { "epoch": 0.08698493438598351, "grad_norm": 2.9739062786102295, "learning_rate": 9.993961033459953e-05, "loss": 3.1706, "step": 9850 }, { "epoch": 0.08742648227626769, "grad_norm": 3.320171356201172, "learning_rate": 9.99389256341499e-05, "loss": 3.6547, "step": 9900 }, { "epoch": 0.08786803016655187, "grad_norm": 
8.49543285369873, "learning_rate": 9.993823707634176e-05, "loss": 3.701, "step": 9950 }, { "epoch": 0.08830957805683605, "grad_norm": 1.3553107976913452, "learning_rate": 9.993754466122827e-05, "loss": 3.4541, "step": 10000 }, { "epoch": 0.08875112594712023, "grad_norm": 1.0776971578598022, "learning_rate": 9.993684838886289e-05, "loss": 3.7029, "step": 10050 }, { "epoch": 0.08919267383740441, "grad_norm": 1.6787606477737427, "learning_rate": 9.993614825929945e-05, "loss": 3.6013, "step": 10100 }, { "epoch": 0.08963422172768859, "grad_norm": 1.4465861320495605, "learning_rate": 9.993544427259198e-05, "loss": 3.2941, "step": 10150 }, { "epoch": 0.09007576961797277, "grad_norm": 3.221482276916504, "learning_rate": 9.99347364287949e-05, "loss": 3.5483, "step": 10200 }, { "epoch": 0.09051731750825695, "grad_norm": 1.0219640731811523, "learning_rate": 9.993402472796284e-05, "loss": 3.2682, "step": 10250 }, { "epoch": 0.09095886539854113, "grad_norm": 4.225593090057373, "learning_rate": 9.993330917015082e-05, "loss": 3.4346, "step": 10300 }, { "epoch": 0.09140041328882531, "grad_norm": 1.145766019821167, "learning_rate": 9.993258975541408e-05, "loss": 3.5205, "step": 10350 }, { "epoch": 0.09184196117910949, "grad_norm": 4.837847709655762, "learning_rate": 9.99318664838082e-05, "loss": 3.3985, "step": 10400 }, { "epoch": 0.09228350906939367, "grad_norm": 3.118101119995117, "learning_rate": 9.993113935538903e-05, "loss": 3.6283, "step": 10450 }, { "epoch": 0.09272505695967785, "grad_norm": 3.6168124675750732, "learning_rate": 9.993040837021277e-05, "loss": 3.1753, "step": 10500 }, { "epoch": 0.09316660484996203, "grad_norm": 3.866116523742676, "learning_rate": 9.992967352833584e-05, "loss": 3.1334, "step": 10550 }, { "epoch": 0.09360815274024621, "grad_norm": 2.1199982166290283, "learning_rate": 9.992893482981505e-05, "loss": 3.3849, "step": 10600 }, { "epoch": 0.09404970063053039, "grad_norm": 1.948744297027588, "learning_rate": 9.99281922747074e-05, "loss": 3.3414, 
"step": 10650 }, { "epoch": 0.09449124852081457, "grad_norm": 5.387505054473877, "learning_rate": 9.99274458630703e-05, "loss": 3.6585, "step": 10700 }, { "epoch": 0.09493279641109875, "grad_norm": 2.675678253173828, "learning_rate": 9.992669559496136e-05, "loss": 3.1715, "step": 10750 }, { "epoch": 0.09537434430138293, "grad_norm": 1.5642513036727905, "learning_rate": 9.992594147043856e-05, "loss": 3.2574, "step": 10800 }, { "epoch": 0.09581589219166711, "grad_norm": 2.7304491996765137, "learning_rate": 9.992518348956014e-05, "loss": 3.547, "step": 10850 }, { "epoch": 0.09625744008195129, "grad_norm": 3.1260108947753906, "learning_rate": 9.992442165238465e-05, "loss": 3.6459, "step": 10900 }, { "epoch": 0.09669898797223547, "grad_norm": 1.5745757818222046, "learning_rate": 9.992365595897092e-05, "loss": 3.0718, "step": 10950 }, { "epoch": 0.09714053586251965, "grad_norm": 4.948554515838623, "learning_rate": 9.992288640937812e-05, "loss": 2.9318, "step": 11000 }, { "epoch": 0.09758208375280383, "grad_norm": 3.8740906715393066, "learning_rate": 9.992211300366568e-05, "loss": 3.5334, "step": 11050 }, { "epoch": 0.09802363164308801, "grad_norm": 4.66175651550293, "learning_rate": 9.992133574189335e-05, "loss": 3.4052, "step": 11100 }, { "epoch": 0.0984651795333722, "grad_norm": 3.1660337448120117, "learning_rate": 9.992055462412113e-05, "loss": 3.4354, "step": 11150 }, { "epoch": 0.09890672742365637, "grad_norm": 3.1641385555267334, "learning_rate": 9.99197696504094e-05, "loss": 3.4509, "step": 11200 }, { "epoch": 0.09934827531394055, "grad_norm": 2.3828437328338623, "learning_rate": 9.991898082081874e-05, "loss": 3.1644, "step": 11250 }, { "epoch": 0.09978982320422473, "grad_norm": 3.6782753467559814, "learning_rate": 9.991818813541014e-05, "loss": 3.6032, "step": 11300 }, { "epoch": 0.10023137109450891, "grad_norm": 1.4960347414016724, "learning_rate": 9.991739159424481e-05, "loss": 3.3263, "step": 11350 }, { "epoch": 0.1006729189847931, "grad_norm": 
3.1267852783203125, "learning_rate": 9.991659119738423e-05, "loss": 3.4492, "step": 11400 }, { "epoch": 0.10111446687507727, "grad_norm": 1.307042121887207, "learning_rate": 9.991578694489028e-05, "loss": 3.2172, "step": 11450 }, { "epoch": 0.10155601476536145, "grad_norm": 2.0429134368896484, "learning_rate": 9.991497883682506e-05, "loss": 3.171, "step": 11500 }, { "epoch": 0.10199756265564563, "grad_norm": 2.784212350845337, "learning_rate": 9.991416687325101e-05, "loss": 3.4447, "step": 11550 }, { "epoch": 0.10243911054592982, "grad_norm": 6.423585414886475, "learning_rate": 9.991335105423081e-05, "loss": 3.2792, "step": 11600 }, { "epoch": 0.102880658436214, "grad_norm": 2.206615924835205, "learning_rate": 9.99125313798275e-05, "loss": 3.2817, "step": 11650 }, { "epoch": 0.10332220632649818, "grad_norm": 4.011673450469971, "learning_rate": 9.991170785010438e-05, "loss": 3.692, "step": 11700 }, { "epoch": 0.10376375421678236, "grad_norm": 1.4347039461135864, "learning_rate": 9.991088046512507e-05, "loss": 3.3256, "step": 11750 }, { "epoch": 0.10420530210706654, "grad_norm": 3.251509428024292, "learning_rate": 9.991004922495348e-05, "loss": 3.3607, "step": 11800 }, { "epoch": 0.10464684999735072, "grad_norm": 5.043297290802002, "learning_rate": 9.990921412965381e-05, "loss": 3.0377, "step": 11850 }, { "epoch": 0.1050883978876349, "grad_norm": 1.7166991233825684, "learning_rate": 9.990837517929057e-05, "loss": 3.2673, "step": 11900 }, { "epoch": 0.10552994577791908, "grad_norm": 2.2065978050231934, "learning_rate": 9.990753237392854e-05, "loss": 3.5916, "step": 11950 }, { "epoch": 0.10597149366820326, "grad_norm": 3.4584174156188965, "learning_rate": 9.990668571363286e-05, "loss": 3.1674, "step": 12000 }, { "epoch": 0.10597149366820326, "eval_asr_loss": 0.8610385786988017, "eval_loss": 3.250169515609741, "eval_runtime": 20.2526, "eval_samples_per_second": 37.921, "eval_steps_per_second": 9.48, "eval_tts_loss": 6.012793056516366, "step": 12000 }, { "epoch": 
0.10641304155848744, "grad_norm": 3.1279189586639404, "learning_rate": 9.99058351984689e-05, "loss": 3.5257, "step": 12050 }, { "epoch": 0.10685458944877162, "grad_norm": 5.271905899047852, "learning_rate": 9.990498082850234e-05, "loss": 3.2174, "step": 12100 }, { "epoch": 0.1072961373390558, "grad_norm": 2.162796974182129, "learning_rate": 9.990412260379922e-05, "loss": 3.4223, "step": 12150 }, { "epoch": 0.10773768522933998, "grad_norm": 2.182065725326538, "learning_rate": 9.990326052442579e-05, "loss": 3.3126, "step": 12200 }, { "epoch": 0.10817923311962416, "grad_norm": 1.2493520975112915, "learning_rate": 9.990239459044866e-05, "loss": 3.5304, "step": 12250 }, { "epoch": 0.10862078100990834, "grad_norm": 2.064229726791382, "learning_rate": 9.99015248019347e-05, "loss": 3.4558, "step": 12300 }, { "epoch": 0.10906232890019252, "grad_norm": 2.22754168510437, "learning_rate": 9.99006511589511e-05, "loss": 3.3677, "step": 12350 }, { "epoch": 0.1095038767904767, "grad_norm": 4.160098075866699, "learning_rate": 9.989977366156535e-05, "loss": 3.5189, "step": 12400 }, { "epoch": 0.10994542468076088, "grad_norm": 0.9621350169181824, "learning_rate": 9.989889230984522e-05, "loss": 3.7358, "step": 12450 }, { "epoch": 0.11038697257104506, "grad_norm": 1.2985560894012451, "learning_rate": 9.989800710385879e-05, "loss": 3.6572, "step": 12500 }, { "epoch": 0.11082852046132924, "grad_norm": 0.9708568453788757, "learning_rate": 9.989711804367443e-05, "loss": 3.2729, "step": 12550 }, { "epoch": 0.11127006835161342, "grad_norm": 1.924854040145874, "learning_rate": 9.989622512936083e-05, "loss": 3.4972, "step": 12600 }, { "epoch": 0.1117116162418976, "grad_norm": 4.869857311248779, "learning_rate": 9.989532836098691e-05, "loss": 3.5102, "step": 12650 }, { "epoch": 0.11215316413218178, "grad_norm": 5.745826244354248, "learning_rate": 9.9894427738622e-05, "loss": 3.3836, "step": 12700 }, { "epoch": 0.11259471202246596, "grad_norm": 5.497977256774902, "learning_rate": 
9.989352326233566e-05, "loss": 3.3298, "step": 12750 }, { "epoch": 0.11303625991275014, "grad_norm": 9.22269344329834, "learning_rate": 9.98926149321977e-05, "loss": 3.3542, "step": 12800 }, { "epoch": 0.11347780780303432, "grad_norm": 2.255528211593628, "learning_rate": 9.989172102972332e-05, "loss": 3.8121, "step": 12850 }, { "epoch": 0.1139193556933185, "grad_norm": 2.629471778869629, "learning_rate": 9.98908050691665e-05, "loss": 3.456, "step": 12900 }, { "epoch": 0.11436090358360268, "grad_norm": 6.432432174682617, "learning_rate": 9.988988525496805e-05, "loss": 3.1076, "step": 12950 }, { "epoch": 0.11480245147388686, "grad_norm": 1.9251166582107544, "learning_rate": 9.988896158719903e-05, "loss": 3.5083, "step": 13000 }, { "epoch": 0.11524399936417104, "grad_norm": 1.9608491659164429, "learning_rate": 9.988803406593077e-05, "loss": 3.4977, "step": 13050 }, { "epoch": 0.11568554725445522, "grad_norm": 2.201385259628296, "learning_rate": 9.988710269123491e-05, "loss": 3.3394, "step": 13100 }, { "epoch": 0.1161270951447394, "grad_norm": 0.8397690653800964, "learning_rate": 9.98861674631834e-05, "loss": 3.0705, "step": 13150 }, { "epoch": 0.11656864303502358, "grad_norm": 3.4315147399902344, "learning_rate": 9.988522838184848e-05, "loss": 2.9964, "step": 13200 }, { "epoch": 0.11701019092530776, "grad_norm": 1.0664474964141846, "learning_rate": 9.988428544730267e-05, "loss": 3.5425, "step": 13250 }, { "epoch": 0.11745173881559194, "grad_norm": 1.7559316158294678, "learning_rate": 9.988333865961883e-05, "loss": 3.4283, "step": 13300 }, { "epoch": 0.11789328670587612, "grad_norm": 1.3742626905441284, "learning_rate": 9.988238801887006e-05, "loss": 3.0457, "step": 13350 }, { "epoch": 0.1183348345961603, "grad_norm": 6.306196212768555, "learning_rate": 9.988143352512982e-05, "loss": 3.4985, "step": 13400 }, { "epoch": 0.11877638248644448, "grad_norm": 0.7708596587181091, "learning_rate": 9.98804751784718e-05, "loss": 3.3379, "step": 13450 }, { "epoch": 
0.11921793037672866, "grad_norm": 7.315723419189453, "learning_rate": 9.987951297897008e-05, "loss": 3.4271, "step": 13500 }, { "epoch": 0.11965947826701284, "grad_norm": 1.3316899538040161, "learning_rate": 9.987854692669894e-05, "loss": 3.362, "step": 13550 }, { "epoch": 0.12010102615729702, "grad_norm": 2.5768868923187256, "learning_rate": 9.9877577021733e-05, "loss": 3.7057, "step": 13600 }, { "epoch": 0.1205425740475812, "grad_norm": 1.0544745922088623, "learning_rate": 9.987660326414718e-05, "loss": 3.2666, "step": 13650 }, { "epoch": 0.12098412193786538, "grad_norm": 1.6136611700057983, "learning_rate": 9.98756256540167e-05, "loss": 3.019, "step": 13700 }, { "epoch": 0.12142566982814956, "grad_norm": 3.0674450397491455, "learning_rate": 9.987464419141707e-05, "loss": 3.3559, "step": 13750 }, { "epoch": 0.12186721771843374, "grad_norm": 5.650421142578125, "learning_rate": 9.987365887642412e-05, "loss": 3.1113, "step": 13800 }, { "epoch": 0.12230876560871792, "grad_norm": 6.18209981918335, "learning_rate": 9.987266970911393e-05, "loss": 3.6087, "step": 13850 }, { "epoch": 0.1227503134990021, "grad_norm": 3.334939956665039, "learning_rate": 9.98716766895629e-05, "loss": 3.2844, "step": 13900 }, { "epoch": 0.12319186138928628, "grad_norm": 1.9125257730484009, "learning_rate": 9.987067981784774e-05, "loss": 3.5561, "step": 13950 }, { "epoch": 0.12363340927957046, "grad_norm": 2.013408899307251, "learning_rate": 9.986967909404547e-05, "loss": 3.7224, "step": 14000 }, { "epoch": 0.12407495716985464, "grad_norm": 1.1568374633789062, "learning_rate": 9.986867451823337e-05, "loss": 3.348, "step": 14050 }, { "epoch": 0.12451650506013882, "grad_norm": 2.9201159477233887, "learning_rate": 9.986766609048904e-05, "loss": 3.358, "step": 14100 }, { "epoch": 0.124958052950423, "grad_norm": 6.0850982666015625, "learning_rate": 9.986665381089038e-05, "loss": 3.3947, "step": 14150 }, { "epoch": 0.12539960084070717, "grad_norm": 2.2208523750305176, "learning_rate": 
9.986563767951555e-05, "loss": 3.3521, "step": 14200 }, { "epoch": 0.12584114873099136, "grad_norm": 3.7602078914642334, "learning_rate": 9.986461769644306e-05, "loss": 3.3009, "step": 14250 }, { "epoch": 0.12628269662127553, "grad_norm": 5.159857749938965, "learning_rate": 9.98635938617517e-05, "loss": 3.0826, "step": 14300 }, { "epoch": 0.12672424451155972, "grad_norm": 1.6318804025650024, "learning_rate": 9.986256617552054e-05, "loss": 3.1461, "step": 14350 }, { "epoch": 0.1271657924018439, "grad_norm": 1.6564624309539795, "learning_rate": 9.986153463782897e-05, "loss": 3.6095, "step": 14400 }, { "epoch": 0.12760734029212809, "grad_norm": 6.902859210968018, "learning_rate": 9.986049924875666e-05, "loss": 3.4435, "step": 14450 }, { "epoch": 0.12804888818241225, "grad_norm": 2.513582229614258, "learning_rate": 9.985946000838359e-05, "loss": 3.6592, "step": 14500 }, { "epoch": 0.12849043607269645, "grad_norm": 1.5714596509933472, "learning_rate": 9.985841691679004e-05, "loss": 3.3225, "step": 14550 }, { "epoch": 0.1289319839629806, "grad_norm": 2.4705002307891846, "learning_rate": 9.985736997405655e-05, "loss": 3.3319, "step": 14600 }, { "epoch": 0.1293735318532648, "grad_norm": 0.8616177439689636, "learning_rate": 9.985631918026401e-05, "loss": 3.1653, "step": 14650 }, { "epoch": 0.12981507974354897, "grad_norm": 1.7353193759918213, "learning_rate": 9.985526453549359e-05, "loss": 3.4691, "step": 14700 }, { "epoch": 0.13025662763383317, "grad_norm": 5.408633708953857, "learning_rate": 9.985420603982673e-05, "loss": 3.5129, "step": 14750 }, { "epoch": 0.13069817552411733, "grad_norm": 1.2819982767105103, "learning_rate": 9.985314369334523e-05, "loss": 3.2823, "step": 14800 }, { "epoch": 0.13113972341440153, "grad_norm": 8.26905632019043, "learning_rate": 9.98520774961311e-05, "loss": 3.2017, "step": 14850 }, { "epoch": 0.1315812713046857, "grad_norm": 2.404160737991333, "learning_rate": 9.985100744826674e-05, "loss": 3.2397, "step": 14900 }, { "epoch": 
0.1320228191949699, "grad_norm": 1.9694509506225586, "learning_rate": 9.984993354983477e-05, "loss": 3.7057, "step": 14950 }, { "epoch": 0.13246436708525405, "grad_norm": 7.179697036743164, "learning_rate": 9.984885580091814e-05, "loss": 2.8689, "step": 15000 }, { "epoch": 0.13246436708525405, "eval_asr_loss": 0.8746035999883381, "eval_loss": 3.1943347454071045, "eval_runtime": 20.0975, "eval_samples_per_second": 38.214, "eval_steps_per_second": 9.553, "eval_tts_loss": 5.963482838455088, "step": 15000 }, { "epoch": 0.13290591497553825, "grad_norm": 1.963370680809021, "learning_rate": 9.98477742016001e-05, "loss": 3.0848, "step": 15050 }, { "epoch": 0.1333474628658224, "grad_norm": 5.354642868041992, "learning_rate": 9.984668875196421e-05, "loss": 3.4422, "step": 15100 }, { "epoch": 0.1337890107561066, "grad_norm": 0.9628197550773621, "learning_rate": 9.98455994520943e-05, "loss": 3.6956, "step": 15150 }, { "epoch": 0.13423055864639077, "grad_norm": 1.9591059684753418, "learning_rate": 9.984450630207451e-05, "loss": 3.8912, "step": 15200 }, { "epoch": 0.13467210653667497, "grad_norm": 2.4057066440582275, "learning_rate": 9.984340930198927e-05, "loss": 3.4458, "step": 15250 }, { "epoch": 0.13511365442695913, "grad_norm": 1.3141722679138184, "learning_rate": 9.984230845192336e-05, "loss": 3.3671, "step": 15300 }, { "epoch": 0.13555520231724333, "grad_norm": 3.438781261444092, "learning_rate": 9.984120375196174e-05, "loss": 3.2085, "step": 15350 }, { "epoch": 0.1359967502075275, "grad_norm": 1.443537712097168, "learning_rate": 9.984011741091279e-05, "loss": 3.2912, "step": 15400 }, { "epoch": 0.1364382980978117, "grad_norm": 2.5141725540161133, "learning_rate": 9.983900508840976e-05, "loss": 3.4251, "step": 15450 }, { "epoch": 0.13687984598809586, "grad_norm": 3.5416128635406494, "learning_rate": 9.98378889162662e-05, "loss": 3.4953, "step": 15500 }, { "epoch": 0.13732139387838005, "grad_norm": 3.750843048095703, "learning_rate": 9.983676889456833e-05, "loss": 3.6289, 
"step": 15550 }, { "epoch": 0.13776294176866422, "grad_norm": 8.563606262207031, "learning_rate": 9.983564502340267e-05, "loss": 3.3268, "step": 15600 }, { "epoch": 0.1382044896589484, "grad_norm": 3.2222371101379395, "learning_rate": 9.983451730285603e-05, "loss": 3.329, "step": 15650 }, { "epoch": 0.1386460375492326, "grad_norm": 2.218827724456787, "learning_rate": 9.983338573301552e-05, "loss": 3.8973, "step": 15700 }, { "epoch": 0.13908758543951677, "grad_norm": 1.1404446363449097, "learning_rate": 9.983225031396852e-05, "loss": 3.5931, "step": 15750 }, { "epoch": 0.13952913332980096, "grad_norm": 0.8973853588104248, "learning_rate": 9.983111104580276e-05, "loss": 3.2535, "step": 15800 }, { "epoch": 0.13997068122008513, "grad_norm": 8.366558074951172, "learning_rate": 9.982996792860623e-05, "loss": 3.0558, "step": 15850 }, { "epoch": 0.14041222911036932, "grad_norm": 1.4844486713409424, "learning_rate": 9.982882096246722e-05, "loss": 3.4828, "step": 15900 }, { "epoch": 0.1408537770006535, "grad_norm": 2.583129644393921, "learning_rate": 9.982767014747432e-05, "loss": 3.6149, "step": 15950 }, { "epoch": 0.14129532489093768, "grad_norm": 3.2486109733581543, "learning_rate": 9.982651548371644e-05, "loss": 3.1331, "step": 16000 }, { "epoch": 0.14173687278122185, "grad_norm": 2.9078361988067627, "learning_rate": 9.982535697128275e-05, "loss": 2.9418, "step": 16050 }, { "epoch": 0.14217842067150605, "grad_norm": 1.986458659172058, "learning_rate": 9.982419461026273e-05, "loss": 3.4626, "step": 16100 }, { "epoch": 0.1426199685617902, "grad_norm": 5.311156272888184, "learning_rate": 9.982302840074617e-05, "loss": 3.1937, "step": 16150 }, { "epoch": 0.1430615164520744, "grad_norm": 1.4057707786560059, "learning_rate": 9.982185834282319e-05, "loss": 4.0044, "step": 16200 }, { "epoch": 0.14350306434235857, "grad_norm": 1.7638394832611084, "learning_rate": 9.98206844365841e-05, "loss": 3.657, "step": 16250 }, { "epoch": 0.14394461223264277, "grad_norm": 2.069039821624756, 
"learning_rate": 9.98195066821196e-05, "loss": 3.5534, "step": 16300 }, { "epoch": 0.14438616012292693, "grad_norm": 7.656248092651367, "learning_rate": 9.981832507952067e-05, "loss": 3.1842, "step": 16350 }, { "epoch": 0.14482770801321113, "grad_norm": 3.7678370475769043, "learning_rate": 9.981713962887859e-05, "loss": 3.515, "step": 16400 }, { "epoch": 0.1452692559034953, "grad_norm": 2.7892682552337646, "learning_rate": 9.98159503302849e-05, "loss": 3.6017, "step": 16450 }, { "epoch": 0.1457108037937795, "grad_norm": 1.75071120262146, "learning_rate": 9.981475718383147e-05, "loss": 3.6968, "step": 16500 }, { "epoch": 0.14615235168406365, "grad_norm": 0.988599419593811, "learning_rate": 9.981356018961047e-05, "loss": 3.638, "step": 16550 }, { "epoch": 0.14659389957434785, "grad_norm": 3.183884382247925, "learning_rate": 9.981235934771436e-05, "loss": 3.4087, "step": 16600 }, { "epoch": 0.147035447464632, "grad_norm": 2.3756110668182373, "learning_rate": 9.981115465823587e-05, "loss": 3.493, "step": 16650 }, { "epoch": 0.1474769953549162, "grad_norm": 1.6157623529434204, "learning_rate": 9.980994612126807e-05, "loss": 3.4459, "step": 16700 }, { "epoch": 0.14791854324520037, "grad_norm": 3.4954352378845215, "learning_rate": 9.980873373690431e-05, "loss": 3.6551, "step": 16750 }, { "epoch": 0.14836009113548457, "grad_norm": 2.129396915435791, "learning_rate": 9.980751750523825e-05, "loss": 3.5101, "step": 16800 }, { "epoch": 0.14880163902576873, "grad_norm": 1.6230214834213257, "learning_rate": 9.98062974263638e-05, "loss": 3.3445, "step": 16850 }, { "epoch": 0.14924318691605293, "grad_norm": 2.7362122535705566, "learning_rate": 9.980507350037522e-05, "loss": 3.4352, "step": 16900 }, { "epoch": 0.1496847348063371, "grad_norm": 4.6118364334106445, "learning_rate": 9.980384572736706e-05, "loss": 2.9692, "step": 16950 }, { "epoch": 0.1501262826966213, "grad_norm": 3.5687484741210938, "learning_rate": 9.980261410743414e-05, "loss": 3.669, "step": 17000 }, { "epoch": 
0.15056783058690545, "grad_norm": 1.4324193000793457, "learning_rate": 9.980137864067158e-05, "loss": 3.0393, "step": 17050 }, { "epoch": 0.15100937847718965, "grad_norm": 2.652137517929077, "learning_rate": 9.980013932717484e-05, "loss": 3.6648, "step": 17100 }, { "epoch": 0.15145092636747381, "grad_norm": 7.182887554168701, "learning_rate": 9.97988961670396e-05, "loss": 3.3025, "step": 17150 }, { "epoch": 0.151892474257758, "grad_norm": 2.4190828800201416, "learning_rate": 9.979764916036195e-05, "loss": 3.5013, "step": 17200 }, { "epoch": 0.15233402214804218, "grad_norm": 4.525496006011963, "learning_rate": 9.979639830723816e-05, "loss": 3.4685, "step": 17250 }, { "epoch": 0.15277557003832637, "grad_norm": 1.242196798324585, "learning_rate": 9.979514360776486e-05, "loss": 3.3286, "step": 17300 }, { "epoch": 0.15321711792861054, "grad_norm": 4.049784183502197, "learning_rate": 9.979388506203897e-05, "loss": 3.1067, "step": 17350 }, { "epoch": 0.15365866581889473, "grad_norm": 2.2166428565979004, "learning_rate": 9.97926226701577e-05, "loss": 3.1167, "step": 17400 }, { "epoch": 0.1541002137091789, "grad_norm": 1.4584290981292725, "learning_rate": 9.979135643221856e-05, "loss": 3.4869, "step": 17450 }, { "epoch": 0.1545417615994631, "grad_norm": 2.357682228088379, "learning_rate": 9.979008634831934e-05, "loss": 3.3837, "step": 17500 }, { "epoch": 0.15498330948974726, "grad_norm": 3.9010651111602783, "learning_rate": 9.978881241855817e-05, "loss": 3.0948, "step": 17550 }, { "epoch": 0.15542485738003145, "grad_norm": 2.2975761890411377, "learning_rate": 9.978753464303343e-05, "loss": 3.487, "step": 17600 }, { "epoch": 0.15586640527031562, "grad_norm": 1.2210371494293213, "learning_rate": 9.978625302184383e-05, "loss": 3.7763, "step": 17650 }, { "epoch": 0.1563079531605998, "grad_norm": 1.8714485168457031, "learning_rate": 9.978496755508836e-05, "loss": 2.9216, "step": 17700 }, { "epoch": 0.15674950105088398, "grad_norm": 3.4243338108062744, "learning_rate": 
9.978367824286629e-05, "loss": 3.5642, "step": 17750 }, { "epoch": 0.15719104894116817, "grad_norm": 1.6219075918197632, "learning_rate": 9.978238508527727e-05, "loss": 3.3517, "step": 17800 }, { "epoch": 0.15763259683145234, "grad_norm": 0.7262994647026062, "learning_rate": 9.97810880824211e-05, "loss": 2.9789, "step": 17850 }, { "epoch": 0.15807414472173653, "grad_norm": 2.3567357063293457, "learning_rate": 9.977981328904049e-05, "loss": 3.4182, "step": 17900 }, { "epoch": 0.1585156926120207, "grad_norm": 2.0587501525878906, "learning_rate": 9.977850867285131e-05, "loss": 3.4566, "step": 17950 }, { "epoch": 0.1589572405023049, "grad_norm": 1.3674755096435547, "learning_rate": 9.977720021169445e-05, "loss": 3.4426, "step": 18000 }, { "epoch": 0.1589572405023049, "eval_asr_loss": 0.8684477998103262, "eval_loss": 3.168827772140503, "eval_runtime": 20.2426, "eval_samples_per_second": 37.94, "eval_steps_per_second": 9.485, "eval_tts_loss": 5.975099919698394, "step": 18000 }, { "epoch": 0.15939878839258906, "grad_norm": 1.7318178415298462, "learning_rate": 9.977588790567097e-05, "loss": 3.1039, "step": 18050 }, { "epoch": 0.15984033628287325, "grad_norm": 4.160914897918701, "learning_rate": 9.977457175488225e-05, "loss": 3.4418, "step": 18100 }, { "epoch": 0.16028188417315742, "grad_norm": 1.6405212879180908, "learning_rate": 9.977325175942992e-05, "loss": 3.537, "step": 18150 }, { "epoch": 0.1607234320634416, "grad_norm": 3.2284727096557617, "learning_rate": 9.977192791941596e-05, "loss": 3.5699, "step": 18200 }, { "epoch": 0.16116497995372578, "grad_norm": 6.879377841949463, "learning_rate": 9.977060023494263e-05, "loss": 3.3869, "step": 18250 }, { "epoch": 0.16160652784400997, "grad_norm": 5.974034309387207, "learning_rate": 9.976926870611247e-05, "loss": 3.3198, "step": 18300 }, { "epoch": 0.16204807573429414, "grad_norm": 1.0853605270385742, "learning_rate": 9.976793333302834e-05, "loss": 3.2399, "step": 18350 }, { "epoch": 0.16248962362457833, "grad_norm": 
2.7796192169189453, "learning_rate": 9.976659411579337e-05, "loss": 3.3726, "step": 18400 }, { "epoch": 0.1629311715148625, "grad_norm": 2.083576202392578, "learning_rate": 9.976525105451102e-05, "loss": 3.3063, "step": 18450 }, { "epoch": 0.1633727194051467, "grad_norm": 1.6082487106323242, "learning_rate": 9.976390414928501e-05, "loss": 3.2636, "step": 18500 }, { "epoch": 0.16381426729543086, "grad_norm": 2.3426663875579834, "learning_rate": 9.97625534002194e-05, "loss": 3.4482, "step": 18550 }, { "epoch": 0.16425581518571505, "grad_norm": 4.199110507965088, "learning_rate": 9.976119880741852e-05, "loss": 3.5328, "step": 18600 }, { "epoch": 0.16469736307599922, "grad_norm": 2.411940097808838, "learning_rate": 9.975984037098698e-05, "loss": 3.1049, "step": 18650 }, { "epoch": 0.16513891096628341, "grad_norm": 8.064169883728027, "learning_rate": 9.975847809102974e-05, "loss": 3.2858, "step": 18700 }, { "epoch": 0.16558045885656758, "grad_norm": 2.0680835247039795, "learning_rate": 9.9757111967652e-05, "loss": 3.1414, "step": 18750 }, { "epoch": 0.16602200674685177, "grad_norm": 2.7678062915802, "learning_rate": 9.975574200095927e-05, "loss": 3.1492, "step": 18800 }, { "epoch": 0.16646355463713594, "grad_norm": 3.402519941329956, "learning_rate": 9.975436819105742e-05, "loss": 3.1184, "step": 18850 }, { "epoch": 0.16690510252742013, "grad_norm": 1.3270809650421143, "learning_rate": 9.97529905380525e-05, "loss": 3.4959, "step": 18900 }, { "epoch": 0.1673466504177043, "grad_norm": 7.308635711669922, "learning_rate": 9.975160904205098e-05, "loss": 3.4175, "step": 18950 }, { "epoch": 0.1677881983079885, "grad_norm": 1.5945827960968018, "learning_rate": 9.975022370315952e-05, "loss": 3.2295, "step": 19000 }, { "epoch": 0.16822974619827266, "grad_norm": 4.1863861083984375, "learning_rate": 9.974883452148517e-05, "loss": 3.1561, "step": 19050 }, { "epoch": 0.16867129408855686, "grad_norm": 1.7289392948150635, "learning_rate": 9.97474414971352e-05, "loss": 3.5214, "step": 
19100 }, { "epoch": 0.16911284197884102, "grad_norm": 1.1129475831985474, "learning_rate": 9.974604463021722e-05, "loss": 2.9315, "step": 19150 }, { "epoch": 0.16955438986912522, "grad_norm": 1.6852185726165771, "learning_rate": 9.974464392083913e-05, "loss": 3.4969, "step": 19200 }, { "epoch": 0.16999593775940938, "grad_norm": 6.104635238647461, "learning_rate": 9.974323936910913e-05, "loss": 3.6542, "step": 19250 }, { "epoch": 0.17043748564969358, "grad_norm": 2.2778172492980957, "learning_rate": 9.974183097513567e-05, "loss": 3.1306, "step": 19300 }, { "epoch": 0.17087903353997774, "grad_norm": 1.4735571146011353, "learning_rate": 9.974041873902757e-05, "loss": 3.654, "step": 19350 }, { "epoch": 0.17132058143026194, "grad_norm": 3.6428987979888916, "learning_rate": 9.973900266089393e-05, "loss": 3.7674, "step": 19400 }, { "epoch": 0.1717621293205461, "grad_norm": 3.5552337169647217, "learning_rate": 9.97375827408441e-05, "loss": 3.5145, "step": 19450 }, { "epoch": 0.1722036772108303, "grad_norm": 3.519054651260376, "learning_rate": 9.973615897898777e-05, "loss": 3.1466, "step": 19500 }, { "epoch": 0.17264522510111446, "grad_norm": 5.785290241241455, "learning_rate": 9.97347313754349e-05, "loss": 3.207, "step": 19550 }, { "epoch": 0.17308677299139866, "grad_norm": 1.1869070529937744, "learning_rate": 9.973329993029577e-05, "loss": 3.4333, "step": 19600 }, { "epoch": 0.17352832088168282, "grad_norm": 1.620424747467041, "learning_rate": 9.973186464368096e-05, "loss": 3.4715, "step": 19650 }, { "epoch": 0.17396986877196702, "grad_norm": 3.2140984535217285, "learning_rate": 9.973042551570131e-05, "loss": 3.5503, "step": 19700 }, { "epoch": 0.17441141666225118, "grad_norm": 8.555562973022461, "learning_rate": 9.972898254646799e-05, "loss": 3.9534, "step": 19750 }, { "epoch": 0.17485296455253538, "grad_norm": 2.755021095275879, "learning_rate": 9.972753573609245e-05, "loss": 3.2534, "step": 19800 }, { "epoch": 0.17529451244281954, "grad_norm": 2.5277762413024902, 
"learning_rate": 9.972608508468646e-05, "loss": 3.9156, "step": 19850 }, { "epoch": 0.17573606033310374, "grad_norm": 6.709779739379883, "learning_rate": 9.972463059236207e-05, "loss": 3.3525, "step": 19900 }, { "epoch": 0.1761776082233879, "grad_norm": 2.41070294380188, "learning_rate": 9.972317225923161e-05, "loss": 3.3441, "step": 19950 }, { "epoch": 0.1766191561136721, "grad_norm": 1.5217126607894897, "learning_rate": 9.972171008540774e-05, "loss": 3.2966, "step": 20000 }, { "epoch": 0.17706070400395627, "grad_norm": 5.1036763191223145, "learning_rate": 9.972024407100337e-05, "loss": 3.1964, "step": 20050 }, { "epoch": 0.17750225189424046, "grad_norm": 1.291801929473877, "learning_rate": 9.97187742161318e-05, "loss": 3.6654, "step": 20100 }, { "epoch": 0.17794379978452463, "grad_norm": 1.6413884162902832, "learning_rate": 9.97173005209065e-05, "loss": 3.5594, "step": 20150 }, { "epoch": 0.17838534767480882, "grad_norm": 2.1488759517669678, "learning_rate": 9.971582298544133e-05, "loss": 3.1799, "step": 20200 }, { "epoch": 0.17882689556509299, "grad_norm": 4.286724090576172, "learning_rate": 9.971434160985041e-05, "loss": 3.1923, "step": 20250 }, { "epoch": 0.17926844345537718, "grad_norm": 2.672957420349121, "learning_rate": 9.971285639424817e-05, "loss": 3.216, "step": 20300 }, { "epoch": 0.17970999134566135, "grad_norm": 1.8457285165786743, "learning_rate": 9.971136733874931e-05, "loss": 3.0968, "step": 20350 }, { "epoch": 0.18015153923594554, "grad_norm": 2.780160665512085, "learning_rate": 9.970987444346889e-05, "loss": 3.7793, "step": 20400 }, { "epoch": 0.1805930871262297, "grad_norm": 4.923703670501709, "learning_rate": 9.970837770852218e-05, "loss": 3.0386, "step": 20450 }, { "epoch": 0.1810346350165139, "grad_norm": 6.101312637329102, "learning_rate": 9.97068771340248e-05, "loss": 3.6275, "step": 20500 }, { "epoch": 0.18147618290679807, "grad_norm": 1.9402213096618652, "learning_rate": 9.970537272009269e-05, "loss": 3.0918, "step": 20550 }, { "epoch": 
0.18191773079708226, "grad_norm": 0.9089447259902954, "learning_rate": 9.970386446684201e-05, "loss": 3.5084, "step": 20600 }, { "epoch": 0.18235927868736643, "grad_norm": 0.5540192723274231, "learning_rate": 9.970235237438927e-05, "loss": 3.3147, "step": 20650 }, { "epoch": 0.18280082657765062, "grad_norm": 3.3821587562561035, "learning_rate": 9.970083644285129e-05, "loss": 3.7705, "step": 20700 }, { "epoch": 0.1832423744679348, "grad_norm": 2.602141857147217, "learning_rate": 9.969931667234512e-05, "loss": 3.231, "step": 20750 }, { "epoch": 0.18368392235821898, "grad_norm": 2.034205198287964, "learning_rate": 9.969779306298818e-05, "loss": 3.5487, "step": 20800 }, { "epoch": 0.18412547024850315, "grad_norm": 2.844647169113159, "learning_rate": 9.969626561489817e-05, "loss": 3.5581, "step": 20850 }, { "epoch": 0.18456701813878734, "grad_norm": 1.6377545595169067, "learning_rate": 9.969473432819305e-05, "loss": 3.8547, "step": 20900 }, { "epoch": 0.1850085660290715, "grad_norm": 7.625864505767822, "learning_rate": 9.96931992029911e-05, "loss": 3.1848, "step": 20950 }, { "epoch": 0.1854501139193557, "grad_norm": 1.9800269603729248, "learning_rate": 9.969166023941089e-05, "loss": 3.1796, "step": 21000 }, { "epoch": 0.1854501139193557, "eval_asr_loss": 0.8812751858267902, "eval_loss": 3.14119029045105, "eval_runtime": 19.8885, "eval_samples_per_second": 38.615, "eval_steps_per_second": 9.654, "eval_tts_loss": 5.95043236252646, "step": 21000 }, { "epoch": 0.18589166180963987, "grad_norm": 0.8084915280342102, "learning_rate": 9.969011743757131e-05, "loss": 3.1867, "step": 21050 }, { "epoch": 0.18633320969992406, "grad_norm": 4.80615234375, "learning_rate": 9.968857079759151e-05, "loss": 3.3244, "step": 21100 }, { "epoch": 0.18677475759020823, "grad_norm": 1.4828674793243408, "learning_rate": 9.968702031959096e-05, "loss": 2.8393, "step": 21150 }, { "epoch": 0.18721630548049242, "grad_norm": 3.2170047760009766, "learning_rate": 9.968546600368942e-05, "loss": 3.3566, 
"step": 21200 }, { "epoch": 0.1876578533707766, "grad_norm": 2.004711151123047, "learning_rate": 9.968390785000696e-05, "loss": 3.5387, "step": 21250 }, { "epoch": 0.18809940126106078, "grad_norm": 2.858024835586548, "learning_rate": 9.968234585866393e-05, "loss": 3.4617, "step": 21300 }, { "epoch": 0.18854094915134495, "grad_norm": 2.3722898960113525, "learning_rate": 9.968078002978097e-05, "loss": 3.3284, "step": 21350 }, { "epoch": 0.18898249704162914, "grad_norm": 3.7834815979003906, "learning_rate": 9.967921036347904e-05, "loss": 3.2961, "step": 21400 }, { "epoch": 0.1894240449319133, "grad_norm": 1.9320305585861206, "learning_rate": 9.967763685987937e-05, "loss": 3.0897, "step": 21450 }, { "epoch": 0.1898655928221975, "grad_norm": 2.595550537109375, "learning_rate": 9.96760595191035e-05, "loss": 3.1881, "step": 21500 }, { "epoch": 0.19030714071248167, "grad_norm": 2.7209346294403076, "learning_rate": 9.967447834127328e-05, "loss": 3.9049, "step": 21550 }, { "epoch": 0.19074868860276586, "grad_norm": 2.9117844104766846, "learning_rate": 9.967289332651085e-05, "loss": 3.7096, "step": 21600 }, { "epoch": 0.19119023649305003, "grad_norm": 3.910952091217041, "learning_rate": 9.96713044749386e-05, "loss": 3.2424, "step": 21650 }, { "epoch": 0.19163178438333422, "grad_norm": 0.9400450587272644, "learning_rate": 9.96697117866793e-05, "loss": 2.9279, "step": 21700 }, { "epoch": 0.1920733322736184, "grad_norm": 3.050595760345459, "learning_rate": 9.966811526185594e-05, "loss": 3.4662, "step": 21750 }, { "epoch": 0.19251488016390259, "grad_norm": 4.441404819488525, "learning_rate": 9.966651490059184e-05, "loss": 3.0896, "step": 21800 }, { "epoch": 0.19295642805418675, "grad_norm": 3.2662458419799805, "learning_rate": 9.966491070301064e-05, "loss": 3.1643, "step": 21850 }, { "epoch": 0.19339797594447095, "grad_norm": 1.5034865140914917, "learning_rate": 9.966330266923623e-05, "loss": 3.1728, "step": 21900 }, { "epoch": 0.1938395238347551, "grad_norm": 1.3567650318145752, 
"learning_rate": 9.966169079939282e-05, "loss": 3.433, "step": 21950 }, { "epoch": 0.1942810717250393, "grad_norm": 2.380711793899536, "learning_rate": 9.966007509360492e-05, "loss": 3.3513, "step": 22000 }, { "epoch": 0.19472261961532347, "grad_norm": 1.819464921951294, "learning_rate": 9.965845555199732e-05, "loss": 3.0924, "step": 22050 }, { "epoch": 0.19516416750560767, "grad_norm": 1.84955894947052, "learning_rate": 9.965683217469513e-05, "loss": 3.3711, "step": 22100 }, { "epoch": 0.19560571539589183, "grad_norm": 2.3455941677093506, "learning_rate": 9.965520496182371e-05, "loss": 3.5974, "step": 22150 }, { "epoch": 0.19604726328617603, "grad_norm": 1.5657644271850586, "learning_rate": 9.965357391350877e-05, "loss": 3.5424, "step": 22200 }, { "epoch": 0.1964888111764602, "grad_norm": 2.460850477218628, "learning_rate": 9.965193902987632e-05, "loss": 3.4851, "step": 22250 }, { "epoch": 0.1969303590667444, "grad_norm": 4.1430768966674805, "learning_rate": 9.96503003110526e-05, "loss": 3.6162, "step": 22300 }, { "epoch": 0.19737190695702855, "grad_norm": 1.511238932609558, "learning_rate": 9.964865775716421e-05, "loss": 3.4041, "step": 22350 }, { "epoch": 0.19781345484731275, "grad_norm": 5.789384841918945, "learning_rate": 9.964701136833801e-05, "loss": 3.6231, "step": 22400 }, { "epoch": 0.1982550027375969, "grad_norm": 1.697780728340149, "learning_rate": 9.964536114470118e-05, "loss": 3.5089, "step": 22450 }, { "epoch": 0.1986965506278811, "grad_norm": 1.9673670530319214, "learning_rate": 9.964370708638118e-05, "loss": 3.1656, "step": 22500 }, { "epoch": 0.19913809851816527, "grad_norm": 1.157395601272583, "learning_rate": 9.964204919350577e-05, "loss": 3.5386, "step": 22550 }, { "epoch": 0.19957964640844947, "grad_norm": 1.872974157333374, "learning_rate": 9.964038746620303e-05, "loss": 3.8252, "step": 22600 }, { "epoch": 0.20002119429873363, "grad_norm": 2.0033257007598877, "learning_rate": 9.963872190460128e-05, "loss": 3.0679, "step": 22650 }, { "epoch": 
0.20046274218901783, "grad_norm": 1.9374537467956543, "learning_rate": 9.963705250882919e-05, "loss": 3.4905, "step": 22700 }, { "epoch": 0.200904290079302, "grad_norm": 1.368355393409729, "learning_rate": 9.963537927901571e-05, "loss": 3.1261, "step": 22750 }, { "epoch": 0.2013458379695862, "grad_norm": 2.8965837955474854, "learning_rate": 9.963370221529007e-05, "loss": 3.434, "step": 22800 }, { "epoch": 0.20178738585987036, "grad_norm": 4.269700527191162, "learning_rate": 9.96320213177818e-05, "loss": 3.3089, "step": 22850 }, { "epoch": 0.20222893375015455, "grad_norm": 2.0559756755828857, "learning_rate": 9.963033658662078e-05, "loss": 3.0824, "step": 22900 }, { "epoch": 0.20267048164043872, "grad_norm": 2.692986011505127, "learning_rate": 9.962868183079845e-05, "loss": 2.9178, "step": 22950 }, { "epoch": 0.2031120295307229, "grad_norm": 2.5358245372772217, "learning_rate": 9.962698950938911e-05, "loss": 3.8775, "step": 23000 }, { "epoch": 0.20355357742100708, "grad_norm": 2.653918504714966, "learning_rate": 9.962529335471565e-05, "loss": 3.9464, "step": 23050 }, { "epoch": 0.20399512531129127, "grad_norm": 2.1768572330474854, "learning_rate": 9.962362740422908e-05, "loss": 3.3436, "step": 23100 }, { "epoch": 0.20443667320157544, "grad_norm": 1.4818288087844849, "learning_rate": 9.962192366007948e-05, "loss": 3.3089, "step": 23150 }, { "epoch": 0.20487822109185963, "grad_norm": 8.97103500366211, "learning_rate": 9.962021608305707e-05, "loss": 3.2528, "step": 23200 }, { "epoch": 0.2053197689821438, "grad_norm": 2.6199610233306885, "learning_rate": 9.961850467329372e-05, "loss": 3.3929, "step": 23250 }, { "epoch": 0.205761316872428, "grad_norm": 1.7377407550811768, "learning_rate": 9.961678943092164e-05, "loss": 3.4344, "step": 23300 }, { "epoch": 0.20620286476271216, "grad_norm": 1.5331953763961792, "learning_rate": 9.961507035607332e-05, "loss": 3.6088, "step": 23350 }, { "epoch": 0.20664441265299635, "grad_norm": 5.9846014976501465, "learning_rate": 
9.961334744888154e-05, "loss": 3.5139, "step": 23400 }, { "epoch": 0.20708596054328052, "grad_norm": 1.6636093854904175, "learning_rate": 9.961162070947936e-05, "loss": 3.783, "step": 23450 }, { "epoch": 0.2075275084335647, "grad_norm": 4.730373382568359, "learning_rate": 9.960989013800019e-05, "loss": 3.5666, "step": 23500 }, { "epoch": 0.20796905632384888, "grad_norm": 1.7042059898376465, "learning_rate": 9.960815573457769e-05, "loss": 3.4093, "step": 23550 }, { "epoch": 0.20841060421413307, "grad_norm": 1.6063077449798584, "learning_rate": 9.96064174993458e-05, "loss": 2.9415, "step": 23600 }, { "epoch": 0.20885215210441724, "grad_norm": 2.5845298767089844, "learning_rate": 9.960467543243884e-05, "loss": 3.421, "step": 23650 }, { "epoch": 0.20929369999470143, "grad_norm": 2.843557119369507, "learning_rate": 9.960292953399131e-05, "loss": 3.7182, "step": 23700 }, { "epoch": 0.2097352478849856, "grad_norm": 2.7299513816833496, "learning_rate": 9.960117980413811e-05, "loss": 3.4932, "step": 23750 }, { "epoch": 0.2101767957752698, "grad_norm": 3.5709776878356934, "learning_rate": 9.959942624301437e-05, "loss": 3.4617, "step": 23800 }, { "epoch": 0.21061834366555396, "grad_norm": 1.636993408203125, "learning_rate": 9.959766885075556e-05, "loss": 3.2784, "step": 23850 }, { "epoch": 0.21105989155583815, "grad_norm": 1.2677909135818481, "learning_rate": 9.959590762749738e-05, "loss": 3.5762, "step": 23900 }, { "epoch": 0.21150143944612232, "grad_norm": 4.043791770935059, "learning_rate": 9.959414257337591e-05, "loss": 3.1917, "step": 23950 }, { "epoch": 0.2119429873364065, "grad_norm": 3.0787148475646973, "learning_rate": 9.959237368852747e-05, "loss": 3.307, "step": 24000 }, { "epoch": 0.2119429873364065, "eval_asr_loss": 0.8739951278052379, "eval_loss": 3.1280133724212646, "eval_runtime": 20.4078, "eval_samples_per_second": 37.633, "eval_steps_per_second": 9.408, "eval_tts_loss": 5.9267985776675065, "step": 24000 }, { "epoch": 0.21238453522669068, "grad_norm": 
2.54976487159729, "learning_rate": 9.95906009730887e-05, "loss": 3.178, "step": 24050 }, { "epoch": 0.21282608311697487, "grad_norm": 1.0521984100341797, "learning_rate": 9.958882442719652e-05, "loss": 3.7995, "step": 24100 }, { "epoch": 0.21326763100725904, "grad_norm": 1.708702564239502, "learning_rate": 9.958704405098815e-05, "loss": 3.5033, "step": 24150 }, { "epoch": 0.21370917889754323, "grad_norm": 2.1780407428741455, "learning_rate": 9.958525984460112e-05, "loss": 3.4977, "step": 24200 }, { "epoch": 0.2141507267878274, "grad_norm": 2.227015972137451, "learning_rate": 9.958347180817323e-05, "loss": 2.9502, "step": 24250 }, { "epoch": 0.2145922746781116, "grad_norm": 1.5458627939224243, "learning_rate": 9.95816799418426e-05, "loss": 3.3266, "step": 24300 }, { "epoch": 0.21503382256839576, "grad_norm": 1.8348188400268555, "learning_rate": 9.957988424574764e-05, "loss": 3.3448, "step": 24350 }, { "epoch": 0.21547537045867995, "grad_norm": 2.4458820819854736, "learning_rate": 9.957808472002704e-05, "loss": 3.9597, "step": 24400 }, { "epoch": 0.21591691834896412, "grad_norm": 1.2153455018997192, "learning_rate": 9.957628136481981e-05, "loss": 3.0095, "step": 24450 }, { "epoch": 0.21635846623924831, "grad_norm": 1.6723811626434326, "learning_rate": 9.957447418026525e-05, "loss": 3.3562, "step": 24500 }, { "epoch": 0.21680001412953248, "grad_norm": 4.406776428222656, "learning_rate": 9.957266316650291e-05, "loss": 3.4466, "step": 24550 }, { "epoch": 0.21724156201981668, "grad_norm": 5.27102518081665, "learning_rate": 9.957084832367273e-05, "loss": 3.3489, "step": 24600 }, { "epoch": 0.21768310991010084, "grad_norm": 2.6601903438568115, "learning_rate": 9.956902965191486e-05, "loss": 3.3423, "step": 24650 }, { "epoch": 0.21812465780038504, "grad_norm": 1.5734961032867432, "learning_rate": 9.956720715136977e-05, "loss": 3.3207, "step": 24700 }, { "epoch": 0.2185662056906692, "grad_norm": 2.8826043605804443, "learning_rate": 9.956538082217826e-05, "loss": 3.1197, 
"step": 24750 }, { "epoch": 0.2190077535809534, "grad_norm": 1.6438381671905518, "learning_rate": 9.956355066448138e-05, "loss": 3.1748, "step": 24800 }, { "epoch": 0.21944930147123756, "grad_norm": 1.6108850240707397, "learning_rate": 9.956171667842048e-05, "loss": 3.7212, "step": 24850 }, { "epoch": 0.21989084936152176, "grad_norm": 1.1622767448425293, "learning_rate": 9.955987886413725e-05, "loss": 3.553, "step": 24900 }, { "epoch": 0.22033239725180592, "grad_norm": 1.478102207183838, "learning_rate": 9.955803722177363e-05, "loss": 3.2171, "step": 24950 }, { "epoch": 0.22077394514209012, "grad_norm": 2.2040581703186035, "learning_rate": 9.95561917514719e-05, "loss": 3.4424, "step": 25000 }, { "epoch": 0.22121549303237428, "grad_norm": 1.8141365051269531, "learning_rate": 9.955434245337457e-05, "loss": 3.6843, "step": 25050 }, { "epoch": 0.22165704092265848, "grad_norm": 1.6195813417434692, "learning_rate": 9.955248932762447e-05, "loss": 3.7377, "step": 25100 }, { "epoch": 0.22209858881294264, "grad_norm": 2.677534580230713, "learning_rate": 9.955063237436477e-05, "loss": 3.5157, "step": 25150 }, { "epoch": 0.22254013670322684, "grad_norm": 1.7741618156433105, "learning_rate": 9.954880884685869e-05, "loss": 3.4256, "step": 25200 }, { "epoch": 0.222981684593511, "grad_norm": 0.9014108777046204, "learning_rate": 9.954694431555343e-05, "loss": 3.5028, "step": 25250 }, { "epoch": 0.2234232324837952, "grad_norm": 2.5479094982147217, "learning_rate": 9.954507595716685e-05, "loss": 3.7392, "step": 25300 }, { "epoch": 0.22386478037407936, "grad_norm": 1.6607797145843506, "learning_rate": 9.954320377184328e-05, "loss": 3.7739, "step": 25350 }, { "epoch": 0.22430632826436356, "grad_norm": 2.8050482273101807, "learning_rate": 9.954132775972735e-05, "loss": 3.3908, "step": 25400 }, { "epoch": 0.22474787615464772, "grad_norm": 0.8809208869934082, "learning_rate": 9.953944792096392e-05, "loss": 3.4604, "step": 25450 }, { "epoch": 0.22518942404493192, "grad_norm": 
4.810263156890869, "learning_rate": 9.953756425569824e-05, "loss": 3.3016, "step": 25500 }, { "epoch": 0.22563097193521608, "grad_norm": 0.7430752515792847, "learning_rate": 9.95356767640758e-05, "loss": 3.3905, "step": 25550 }, { "epoch": 0.22607251982550028, "grad_norm": 1.2698413133621216, "learning_rate": 9.953378544624237e-05, "loss": 3.4881, "step": 25600 }, { "epoch": 0.22651406771578445, "grad_norm": 2.968998908996582, "learning_rate": 9.953189030234405e-05, "loss": 3.2608, "step": 25650 }, { "epoch": 0.22695561560606864, "grad_norm": 8.66765308380127, "learning_rate": 9.95299913325272e-05, "loss": 3.1594, "step": 25700 }, { "epoch": 0.2273971634963528, "grad_norm": 3.061037302017212, "learning_rate": 9.952808853693856e-05, "loss": 3.5046, "step": 25750 }, { "epoch": 0.227838711386637, "grad_norm": 1.6408863067626953, "learning_rate": 9.952618191572503e-05, "loss": 2.9974, "step": 25800 }, { "epoch": 0.22828025927692117, "grad_norm": 1.7766352891921997, "learning_rate": 9.952427146903393e-05, "loss": 3.4394, "step": 25850 }, { "epoch": 0.22872180716720536, "grad_norm": 1.8550622463226318, "learning_rate": 9.952235719701283e-05, "loss": 3.2499, "step": 25900 }, { "epoch": 0.22916335505748953, "grad_norm": 4.626537799835205, "learning_rate": 9.952043909980955e-05, "loss": 3.3191, "step": 25950 }, { "epoch": 0.22960490294777372, "grad_norm": 1.894518256187439, "learning_rate": 9.951851717757228e-05, "loss": 3.2887, "step": 26000 }, { "epoch": 0.2300464508380579, "grad_norm": 2.2566466331481934, "learning_rate": 9.951659143044947e-05, "loss": 3.2224, "step": 26050 }, { "epoch": 0.23048799872834208, "grad_norm": 1.502895474433899, "learning_rate": 9.951466185858985e-05, "loss": 3.4682, "step": 26100 }, { "epoch": 0.23092954661862625, "grad_norm": 3.664829730987549, "learning_rate": 9.951272846214247e-05, "loss": 3.0882, "step": 26150 }, { "epoch": 0.23137109450891044, "grad_norm": 2.9891955852508545, "learning_rate": 9.951079124125668e-05, "loss": 3.1857, 
"step": 26200 }, { "epoch": 0.2318126423991946, "grad_norm": 1.8004250526428223, "learning_rate": 9.950885019608211e-05, "loss": 3.3859, "step": 26250 }, { "epoch": 0.2322541902894788, "grad_norm": 2.643892526626587, "learning_rate": 9.950690532676869e-05, "loss": 3.1025, "step": 26300 }, { "epoch": 0.23269573817976297, "grad_norm": 8.552020072937012, "learning_rate": 9.950495663346662e-05, "loss": 2.8521, "step": 26350 }, { "epoch": 0.23313728607004716, "grad_norm": 2.302180528640747, "learning_rate": 9.950300411632645e-05, "loss": 3.6808, "step": 26400 }, { "epoch": 0.23357883396033133, "grad_norm": 2.410003185272217, "learning_rate": 9.950104777549898e-05, "loss": 3.2536, "step": 26450 }, { "epoch": 0.23402038185061552, "grad_norm": 3.413940906524658, "learning_rate": 9.949908761113534e-05, "loss": 3.6206, "step": 26500 }, { "epoch": 0.2344619297408997, "grad_norm": 1.564780354499817, "learning_rate": 9.949712362338692e-05, "loss": 3.4719, "step": 26550 }, { "epoch": 0.23490347763118388, "grad_norm": 1.3204708099365234, "learning_rate": 9.94951558124054e-05, "loss": 3.95, "step": 26600 }, { "epoch": 0.23534502552146805, "grad_norm": 2.4696288108825684, "learning_rate": 9.949318417834283e-05, "loss": 3.8591, "step": 26650 }, { "epoch": 0.23578657341175224, "grad_norm": 1.5461256504058838, "learning_rate": 9.949120872135144e-05, "loss": 3.6145, "step": 26700 }, { "epoch": 0.2362281213020364, "grad_norm": 1.8961230516433716, "learning_rate": 9.948922944158388e-05, "loss": 3.4088, "step": 26750 }, { "epoch": 0.2366696691923206, "grad_norm": 3.098393201828003, "learning_rate": 9.9487246339193e-05, "loss": 3.1048, "step": 26800 }, { "epoch": 0.23711121708260477, "grad_norm": 0.8614824414253235, "learning_rate": 9.948525941433195e-05, "loss": 3.3338, "step": 26850 }, { "epoch": 0.23755276497288896, "grad_norm": 2.924774646759033, "learning_rate": 9.948326866715426e-05, "loss": 3.2607, "step": 26900 }, { "epoch": 0.23799431286317313, "grad_norm": 5.287550449371338, 
"learning_rate": 9.948127409781367e-05, "loss": 3.4998, "step": 26950 }, { "epoch": 0.23843586075345732, "grad_norm": 1.865439534187317, "learning_rate": 9.947927570646424e-05, "loss": 3.1788, "step": 27000 }, { "epoch": 0.23843586075345732, "eval_asr_loss": 0.8890385764932143, "eval_loss": 3.1024086475372314, "eval_runtime": 19.895, "eval_samples_per_second": 38.603, "eval_steps_per_second": 9.651, "eval_tts_loss": 5.952762275795868, "step": 27000 }, { "epoch": 0.2388774086437415, "grad_norm": 4.502477169036865, "learning_rate": 9.947727349326033e-05, "loss": 2.8551, "step": 27050 }, { "epoch": 0.23931895653402568, "grad_norm": 2.6308412551879883, "learning_rate": 9.94752674583566e-05, "loss": 3.4248, "step": 27100 }, { "epoch": 0.23976050442430985, "grad_norm": 1.3937596082687378, "learning_rate": 9.9473257601908e-05, "loss": 3.0961, "step": 27150 }, { "epoch": 0.24020205231459404, "grad_norm": 2.9952657222747803, "learning_rate": 9.947124392406977e-05, "loss": 3.7838, "step": 27200 }, { "epoch": 0.2406436002048782, "grad_norm": 3.2740750312805176, "learning_rate": 9.946922642499744e-05, "loss": 3.5869, "step": 27250 }, { "epoch": 0.2410851480951624, "grad_norm": 1.290229082107544, "learning_rate": 9.946720510484686e-05, "loss": 3.521, "step": 27300 }, { "epoch": 0.24152669598544657, "grad_norm": 0.997048020362854, "learning_rate": 9.946517996377417e-05, "loss": 3.1288, "step": 27350 }, { "epoch": 0.24196824387573077, "grad_norm": 1.3124210834503174, "learning_rate": 9.946315100193577e-05, "loss": 2.9812, "step": 27400 }, { "epoch": 0.24240979176601493, "grad_norm": 1.2291759252548218, "learning_rate": 9.946111821948839e-05, "loss": 3.3039, "step": 27450 }, { "epoch": 0.24285133965629913, "grad_norm": 1.4661563634872437, "learning_rate": 9.945908161658904e-05, "loss": 3.271, "step": 27500 }, { "epoch": 0.2432928875465833, "grad_norm": 2.156710624694824, "learning_rate": 9.945704119339506e-05, "loss": 3.7045, "step": 27550 }, { "epoch": 0.24373443543686749, 
"grad_norm": 3.8776209354400635, "learning_rate": 9.9454996950064e-05, "loss": 3.6893, "step": 27600 }, { "epoch": 0.24417598332715165, "grad_norm": 1.0397090911865234, "learning_rate": 9.945294888675381e-05, "loss": 3.5694, "step": 27650 }, { "epoch": 0.24461753121743585, "grad_norm": 6.586629867553711, "learning_rate": 9.945089700362266e-05, "loss": 3.4295, "step": 27700 }, { "epoch": 0.24505907910772, "grad_norm": 1.8651010990142822, "learning_rate": 9.944884130082905e-05, "loss": 3.4199, "step": 27750 }, { "epoch": 0.2455006269980042, "grad_norm": 2.351825475692749, "learning_rate": 9.944678177853176e-05, "loss": 3.88, "step": 27800 }, { "epoch": 0.24594217488828837, "grad_norm": 3.991497755050659, "learning_rate": 9.944471843688987e-05, "loss": 3.5451, "step": 27850 }, { "epoch": 0.24638372277857257, "grad_norm": 4.23520040512085, "learning_rate": 9.944265127606278e-05, "loss": 3.2976, "step": 27900 }, { "epoch": 0.24682527066885673, "grad_norm": 3.55438232421875, "learning_rate": 9.944058029621013e-05, "loss": 2.9787, "step": 27950 }, { "epoch": 0.24726681855914093, "grad_norm": 6.835021495819092, "learning_rate": 9.94385054974919e-05, "loss": 3.151, "step": 28000 }, { "epoch": 0.2477083664494251, "grad_norm": 2.232171058654785, "learning_rate": 9.943642688006834e-05, "loss": 3.353, "step": 28050 }, { "epoch": 0.2481499143397093, "grad_norm": 2.0550529956817627, "learning_rate": 9.94343444441e-05, "loss": 3.3, "step": 28100 }, { "epoch": 0.24859146222999345, "grad_norm": 0.9220274686813354, "learning_rate": 9.943225818974775e-05, "loss": 3.3238, "step": 28150 }, { "epoch": 0.24903301012027765, "grad_norm": 3.3017609119415283, "learning_rate": 9.943016811717275e-05, "loss": 3.652, "step": 28200 }, { "epoch": 0.24947455801056181, "grad_norm": 2.590731620788574, "learning_rate": 9.94280742265364e-05, "loss": 3.362, "step": 28250 }, { "epoch": 0.249916105900846, "grad_norm": 1.249603271484375, "learning_rate": 9.942597651800046e-05, "loss": 3.2159, "step": 28300 
}, { "epoch": 0.2503576537911302, "grad_norm": 3.9774234294891357, "learning_rate": 9.942387499172694e-05, "loss": 3.2234, "step": 28350 }, { "epoch": 0.25079920168141434, "grad_norm": 1.4465439319610596, "learning_rate": 9.94217696478782e-05, "loss": 3.3785, "step": 28400 }, { "epoch": 0.25124074957169856, "grad_norm": 0.9342706799507141, "learning_rate": 9.941966048661682e-05, "loss": 3.1059, "step": 28450 }, { "epoch": 0.25168229746198273, "grad_norm": 2.685728073120117, "learning_rate": 9.941754750810573e-05, "loss": 3.5622, "step": 28500 }, { "epoch": 0.2521238453522669, "grad_norm": 6.555978775024414, "learning_rate": 9.941543071250816e-05, "loss": 3.3656, "step": 28550 }, { "epoch": 0.25256539324255106, "grad_norm": 0.8330821394920349, "learning_rate": 9.94133100999876e-05, "loss": 3.5714, "step": 28600 }, { "epoch": 0.2530069411328353, "grad_norm": 22.39084243774414, "learning_rate": 9.941118567070784e-05, "loss": 3.1974, "step": 28650 }, { "epoch": 0.25344848902311945, "grad_norm": 1.9922549724578857, "learning_rate": 9.940905742483298e-05, "loss": 3.5825, "step": 28700 }, { "epoch": 0.2538900369134036, "grad_norm": 1.42471444606781, "learning_rate": 9.94069253625274e-05, "loss": 3.3538, "step": 28750 }, { "epoch": 0.2543315848036878, "grad_norm": 3.5649797916412354, "learning_rate": 9.940478948395582e-05, "loss": 3.1705, "step": 28800 }, { "epoch": 0.254773132693972, "grad_norm": 2.5138847827911377, "learning_rate": 9.940264978928318e-05, "loss": 3.7867, "step": 28850 }, { "epoch": 0.25521468058425617, "grad_norm": 1.892633080482483, "learning_rate": 9.940050627867476e-05, "loss": 3.1689, "step": 28900 }, { "epoch": 0.25565622847454034, "grad_norm": 1.2871747016906738, "learning_rate": 9.939835895229615e-05, "loss": 3.651, "step": 28950 }, { "epoch": 0.2560977763648245, "grad_norm": 4.025623798370361, "learning_rate": 9.939620781031318e-05, "loss": 3.472, "step": 29000 }, { "epoch": 0.2565393242551087, "grad_norm": 2.834179162979126, "learning_rate": 
9.939405285289203e-05, "loss": 3.3033, "step": 29050 }, { "epoch": 0.2569808721453929, "grad_norm": 2.194532871246338, "learning_rate": 9.939189408019916e-05, "loss": 3.3813, "step": 29100 }, { "epoch": 0.25742242003567706, "grad_norm": 2.8828024864196777, "learning_rate": 9.938973149240127e-05, "loss": 3.283, "step": 29150 }, { "epoch": 0.2578639679259612, "grad_norm": 3.8372597694396973, "learning_rate": 9.938756508966547e-05, "loss": 3.324, "step": 29200 }, { "epoch": 0.25830551581624545, "grad_norm": 1.0322990417480469, "learning_rate": 9.938539487215903e-05, "loss": 3.1929, "step": 29250 }, { "epoch": 0.2587470637065296, "grad_norm": 4.666669845581055, "learning_rate": 9.938322084004964e-05, "loss": 3.2645, "step": 29300 }, { "epoch": 0.2591886115968138, "grad_norm": 2.352522850036621, "learning_rate": 9.938104299350519e-05, "loss": 2.8695, "step": 29350 }, { "epoch": 0.25963015948709794, "grad_norm": 1.9813988208770752, "learning_rate": 9.93788613326939e-05, "loss": 3.0595, "step": 29400 }, { "epoch": 0.26007170737738217, "grad_norm": 4.507404804229736, "learning_rate": 9.93766758577843e-05, "loss": 3.3092, "step": 29450 }, { "epoch": 0.26051325526766633, "grad_norm": 0.7371171712875366, "learning_rate": 9.93744865689452e-05, "loss": 3.3464, "step": 29500 }, { "epoch": 0.2609548031579505, "grad_norm": 1.4071518182754517, "learning_rate": 9.937229346634566e-05, "loss": 2.8855, "step": 29550 }, { "epoch": 0.26139635104823467, "grad_norm": 2.9686851501464844, "learning_rate": 9.937009655015515e-05, "loss": 3.1077, "step": 29600 }, { "epoch": 0.2618378989385189, "grad_norm": 2.3262200355529785, "learning_rate": 9.936789582054331e-05, "loss": 3.1181, "step": 29650 }, { "epoch": 0.26227944682880305, "grad_norm": 1.0248990058898926, "learning_rate": 9.936569127768017e-05, "loss": 3.4325, "step": 29700 }, { "epoch": 0.2627209947190872, "grad_norm": 3.9115331172943115, "learning_rate": 9.936348292173596e-05, "loss": 3.132, "step": 29750 }, { "epoch": 
0.2631625426093714, "grad_norm": 4.591248512268066, "learning_rate": 9.936127075288128e-05, "loss": 3.151, "step": 29800 }, { "epoch": 0.2636040904996556, "grad_norm": 2.473047971725464, "learning_rate": 9.935905477128703e-05, "loss": 3.4772, "step": 29850 }, { "epoch": 0.2640456383899398, "grad_norm": 1.3905609846115112, "learning_rate": 9.935683497712434e-05, "loss": 2.9894, "step": 29900 }, { "epoch": 0.26448718628022394, "grad_norm": 2.24664568901062, "learning_rate": 9.935461137056469e-05, "loss": 3.689, "step": 29950 }, { "epoch": 0.2649287341705081, "grad_norm": 1.1567890644073486, "learning_rate": 9.935238395177981e-05, "loss": 3.2017, "step": 30000 }, { "epoch": 0.2649287341705081, "eval_asr_loss": 0.8843846080425863, "eval_loss": 3.092066526412964, "eval_runtime": 19.8662, "eval_samples_per_second": 38.659, "eval_steps_per_second": 9.665, "eval_tts_loss": 5.902505457880168, "step": 30000 }, { "epoch": 0.26537028206079233, "grad_norm": 5.792656421661377, "learning_rate": 9.935015272094178e-05, "loss": 3.3117, "step": 30050 }, { "epoch": 0.2658118299510765, "grad_norm": 3.329699993133545, "learning_rate": 9.934791767822292e-05, "loss": 3.4779, "step": 30100 }, { "epoch": 0.26625337784136066, "grad_norm": 1.1682640314102173, "learning_rate": 9.934567882379589e-05, "loss": 3.0166, "step": 30150 }, { "epoch": 0.2666949257316448, "grad_norm": 2.778029441833496, "learning_rate": 9.93434361578336e-05, "loss": 3.209, "step": 30200 }, { "epoch": 0.26713647362192905, "grad_norm": 2.259307622909546, "learning_rate": 9.934118968050929e-05, "loss": 3.0044, "step": 30250 }, { "epoch": 0.2675780215122132, "grad_norm": 2.9219155311584473, "learning_rate": 9.933898443511526e-05, "loss": 3.7484, "step": 30300 }, { "epoch": 0.2680195694024974, "grad_norm": 1.4216759204864502, "learning_rate": 9.933673041180634e-05, "loss": 3.2729, "step": 30350 }, { "epoch": 0.26846111729278155, "grad_norm": 0.8821609020233154, "learning_rate": 9.933447257765338e-05, "loss": 2.7456, "step": 
30400 }, { "epoch": 0.26890266518306577, "grad_norm": 4.904880046844482, "learning_rate": 9.933221093283074e-05, "loss": 3.2778, "step": 30450 }, { "epoch": 0.26934421307334994, "grad_norm": 1.9883630275726318, "learning_rate": 9.932994547751313e-05, "loss": 3.4769, "step": 30500 }, { "epoch": 0.2697857609636341, "grad_norm": 3.2121076583862305, "learning_rate": 9.932767621187555e-05, "loss": 3.2237, "step": 30550 }, { "epoch": 0.27022730885391827, "grad_norm": 0.9768474102020264, "learning_rate": 9.932540313609326e-05, "loss": 3.2586, "step": 30600 }, { "epoch": 0.2706688567442025, "grad_norm": 1.543562889099121, "learning_rate": 9.932312625034186e-05, "loss": 3.5258, "step": 30650 }, { "epoch": 0.27111040463448666, "grad_norm": 1.3274840116500854, "learning_rate": 9.932084555479719e-05, "loss": 3.2218, "step": 30700 }, { "epoch": 0.2715519525247708, "grad_norm": 3.6982388496398926, "learning_rate": 9.931856104963543e-05, "loss": 3.4994, "step": 30750 }, { "epoch": 0.271993500415055, "grad_norm": 1.6436798572540283, "learning_rate": 9.931627273503304e-05, "loss": 3.4035, "step": 30800 }, { "epoch": 0.2724350483053392, "grad_norm": 4.315684795379639, "learning_rate": 9.931398061116677e-05, "loss": 3.4837, "step": 30850 }, { "epoch": 0.2728765961956234, "grad_norm": 1.2069634199142456, "learning_rate": 9.931168467821367e-05, "loss": 3.2571, "step": 30900 }, { "epoch": 0.27331814408590754, "grad_norm": 3.137390375137329, "learning_rate": 9.930938493635109e-05, "loss": 3.2457, "step": 30950 }, { "epoch": 0.2737596919761917, "grad_norm": 1.6049838066101074, "learning_rate": 9.930708138575664e-05, "loss": 2.9232, "step": 31000 }, { "epoch": 0.27420123986647593, "grad_norm": 2.4024581909179688, "learning_rate": 9.930477402660827e-05, "loss": 3.1933, "step": 31050 }, { "epoch": 0.2746427877567601, "grad_norm": 2.070897340774536, "learning_rate": 9.93024628590842e-05, "loss": 3.1729, "step": 31100 }, { "epoch": 0.27508433564704426, "grad_norm": 2.37459397315979, 
"learning_rate": 9.930014788336293e-05, "loss": 3.0385, "step": 31150 }, { "epoch": 0.27552588353732843, "grad_norm": 3.5087156295776367, "learning_rate": 9.92978290996233e-05, "loss": 3.1936, "step": 31200 }, { "epoch": 0.27596743142761265, "grad_norm": 6.348201751708984, "learning_rate": 9.929550650804443e-05, "loss": 3.2907, "step": 31250 }, { "epoch": 0.2764089793178968, "grad_norm": 1.3216320276260376, "learning_rate": 9.929318010880566e-05, "loss": 3.1206, "step": 31300 }, { "epoch": 0.276850527208181, "grad_norm": 3.7011451721191406, "learning_rate": 9.929084990208674e-05, "loss": 3.6797, "step": 31350 }, { "epoch": 0.2772920750984652, "grad_norm": 1.47987961769104, "learning_rate": 9.928851588806763e-05, "loss": 3.2982, "step": 31400 }, { "epoch": 0.2777336229887494, "grad_norm": 2.7035892009735107, "learning_rate": 9.928617806692863e-05, "loss": 3.5924, "step": 31450 }, { "epoch": 0.27817517087903354, "grad_norm": 1.140507698059082, "learning_rate": 9.92838364388503e-05, "loss": 3.4756, "step": 31500 }, { "epoch": 0.2786167187693177, "grad_norm": 1.1196190118789673, "learning_rate": 9.928149100401354e-05, "loss": 3.5627, "step": 31550 }, { "epoch": 0.27905826665960193, "grad_norm": 3.444877862930298, "learning_rate": 9.927914176259947e-05, "loss": 3.3291, "step": 31600 }, { "epoch": 0.2794998145498861, "grad_norm": 4.410919189453125, "learning_rate": 9.927678871478958e-05, "loss": 3.1749, "step": 31650 }, { "epoch": 0.27994136244017026, "grad_norm": 1.771317481994629, "learning_rate": 9.927443186076563e-05, "loss": 2.6498, "step": 31700 }, { "epoch": 0.2803829103304544, "grad_norm": 2.1711716651916504, "learning_rate": 9.927207120070964e-05, "loss": 3.6787, "step": 31750 }, { "epoch": 0.28082445822073865, "grad_norm": 4.0384202003479, "learning_rate": 9.926970673480398e-05, "loss": 3.6298, "step": 31800 }, { "epoch": 0.2812660061110228, "grad_norm": 2.198716640472412, "learning_rate": 9.926733846323125e-05, "loss": 2.9091, "step": 31850 }, { "epoch": 
0.281707554001307, "grad_norm": 1.819726586341858, "learning_rate": 9.92649663861744e-05, "loss": 3.5799, "step": 31900 }, { "epoch": 0.28214910189159115, "grad_norm": 1.7500370740890503, "learning_rate": 9.926259050381667e-05, "loss": 3.4091, "step": 31950 }, { "epoch": 0.28259064978187537, "grad_norm": 4.203965663909912, "learning_rate": 9.926021081634155e-05, "loss": 3.2101, "step": 32000 }, { "epoch": 0.28303219767215954, "grad_norm": 2.5612051486968994, "learning_rate": 9.925782732393285e-05, "loss": 3.0251, "step": 32050 }, { "epoch": 0.2834737455624437, "grad_norm": 1.427227258682251, "learning_rate": 9.92554400267747e-05, "loss": 3.2904, "step": 32100 }, { "epoch": 0.28391529345272787, "grad_norm": 1.6692140102386475, "learning_rate": 9.925304892505146e-05, "loss": 3.74, "step": 32150 }, { "epoch": 0.2843568413430121, "grad_norm": 1.287840485572815, "learning_rate": 9.925065401894786e-05, "loss": 3.5495, "step": 32200 }, { "epoch": 0.28479838923329626, "grad_norm": 1.882232427597046, "learning_rate": 9.924825530864887e-05, "loss": 3.2056, "step": 32250 }, { "epoch": 0.2852399371235804, "grad_norm": 1.2615700960159302, "learning_rate": 9.924585279433976e-05, "loss": 3.1918, "step": 32300 }, { "epoch": 0.2856814850138646, "grad_norm": 1.7634786367416382, "learning_rate": 9.924344647620612e-05, "loss": 3.5558, "step": 32350 }, { "epoch": 0.2861230329041488, "grad_norm": 1.2818759679794312, "learning_rate": 9.924103635443381e-05, "loss": 3.0904, "step": 32400 }, { "epoch": 0.286564580794433, "grad_norm": 3.8670778274536133, "learning_rate": 9.9238622429209e-05, "loss": 3.8008, "step": 32450 }, { "epoch": 0.28700612868471714, "grad_norm": 2.2975099086761475, "learning_rate": 9.923625309255875e-05, "loss": 2.9886, "step": 32500 }, { "epoch": 0.2874476765750013, "grad_norm": 3.461240530014038, "learning_rate": 9.923383163704833e-05, "loss": 3.3378, "step": 32550 }, { "epoch": 0.28788922446528553, "grad_norm": 1.2354512214660645, "learning_rate": 
9.923140637864192e-05, "loss": 3.4829, "step": 32600 }, { "epoch": 0.2883307723555697, "grad_norm": 2.5937671661376953, "learning_rate": 9.922897731752683e-05, "loss": 3.3737, "step": 32650 }, { "epoch": 0.28877232024585386, "grad_norm": 2.7086663246154785, "learning_rate": 9.922654445389069e-05, "loss": 3.3247, "step": 32700 }, { "epoch": 0.28921386813613803, "grad_norm": 1.1576141119003296, "learning_rate": 9.922410778792142e-05, "loss": 3.3032, "step": 32750 }, { "epoch": 0.28965541602642225, "grad_norm": 4.800418853759766, "learning_rate": 9.922166731980725e-05, "loss": 3.7363, "step": 32800 }, { "epoch": 0.2900969639167064, "grad_norm": 1.6237969398498535, "learning_rate": 9.921922304973663e-05, "loss": 3.6136, "step": 32850 }, { "epoch": 0.2905385118069906, "grad_norm": 3.0943002700805664, "learning_rate": 9.921677497789843e-05, "loss": 3.18, "step": 32900 }, { "epoch": 0.29098005969727475, "grad_norm": 2.4920825958251953, "learning_rate": 9.921432310448169e-05, "loss": 3.2806, "step": 32950 }, { "epoch": 0.291421607587559, "grad_norm": 4.264760971069336, "learning_rate": 9.921186742967581e-05, "loss": 2.9155, "step": 33000 }, { "epoch": 0.291421607587559, "eval_asr_loss": 0.895976054242954, "eval_loss": 3.079402208328247, "eval_runtime": 20.0371, "eval_samples_per_second": 38.329, "eval_steps_per_second": 9.582, "eval_tts_loss": 5.968250579680352, "step": 33000 }, { "epoch": 0.29186315547784314, "grad_norm": 3.386139154434204, "learning_rate": 9.92094079536705e-05, "loss": 3.9219, "step": 33050 }, { "epoch": 0.2923047033681273, "grad_norm": 1.537395715713501, "learning_rate": 9.92069446766557e-05, "loss": 3.1577, "step": 33100 }, { "epoch": 0.29274625125841147, "grad_norm": 0.6773030757904053, "learning_rate": 9.920447759882168e-05, "loss": 2.9516, "step": 33150 }, { "epoch": 0.2931877991486957, "grad_norm": 2.179901599884033, "learning_rate": 9.9202006720359e-05, "loss": 3.7766, "step": 33200 }, { "epoch": 0.29362934703897986, "grad_norm": 
4.384719371795654, "learning_rate": 9.919953204145853e-05, "loss": 3.4504, "step": 33250 }, { "epoch": 0.294070894929264, "grad_norm": 5.59550666809082, "learning_rate": 9.919705356231141e-05, "loss": 3.2456, "step": 33300 }, { "epoch": 0.2945124428195482, "grad_norm": 1.8736000061035156, "learning_rate": 9.919457128310909e-05, "loss": 3.5584, "step": 33350 }, { "epoch": 0.2949539907098324, "grad_norm": 2.3086678981781006, "learning_rate": 9.919208520404328e-05, "loss": 3.3485, "step": 33400 }, { "epoch": 0.2953955386001166, "grad_norm": 1.736769676208496, "learning_rate": 9.918959532530604e-05, "loss": 3.426, "step": 33450 }, { "epoch": 0.29583708649040075, "grad_norm": 1.8966106176376343, "learning_rate": 9.918710164708967e-05, "loss": 3.0041, "step": 33500 }, { "epoch": 0.2962786343806849, "grad_norm": 3.222245216369629, "learning_rate": 9.918460416958679e-05, "loss": 3.0005, "step": 33550 }, { "epoch": 0.29672018227096914, "grad_norm": 0.8299497365951538, "learning_rate": 9.918210289299031e-05, "loss": 3.3702, "step": 33600 }, { "epoch": 0.2971617301612533, "grad_norm": 3.876796245574951, "learning_rate": 9.917959781749345e-05, "loss": 3.243, "step": 33650 }, { "epoch": 0.29760327805153747, "grad_norm": 4.3070807456970215, "learning_rate": 9.917708894328968e-05, "loss": 3.4787, "step": 33700 }, { "epoch": 0.29804482594182163, "grad_norm": 2.306934356689453, "learning_rate": 9.917457627057279e-05, "loss": 3.2177, "step": 33750 }, { "epoch": 0.29848637383210586, "grad_norm": 3.8237075805664062, "learning_rate": 9.917205979953687e-05, "loss": 2.9533, "step": 33800 }, { "epoch": 0.29892792172239, "grad_norm": 1.627469778060913, "learning_rate": 9.91695395303763e-05, "loss": 3.3876, "step": 33850 }, { "epoch": 0.2993694696126742, "grad_norm": 1.0928808450698853, "learning_rate": 9.916701546328576e-05, "loss": 2.8597, "step": 33900 }, { "epoch": 0.29981101750295835, "grad_norm": 2.0172150135040283, "learning_rate": 9.916448759846017e-05, "loss": 3.6425, "step": 33950 
}, { "epoch": 0.3002525653932426, "grad_norm": 2.5472891330718994, "learning_rate": 9.916195593609486e-05, "loss": 3.4216, "step": 34000 }, { "epoch": 0.30069411328352674, "grad_norm": 2.0118589401245117, "learning_rate": 9.91594712227922e-05, "loss": 3.1182, "step": 34050 }, { "epoch": 0.3011356611738109, "grad_norm": 1.4318723678588867, "learning_rate": 9.915693204187532e-05, "loss": 3.2758, "step": 34100 }, { "epoch": 0.3015772090640951, "grad_norm": 6.2676801681518555, "learning_rate": 9.915438906400228e-05, "loss": 3.2553, "step": 34150 }, { "epoch": 0.3020187569543793, "grad_norm": 1.1752573251724243, "learning_rate": 9.915184228936952e-05, "loss": 3.4345, "step": 34200 }, { "epoch": 0.30246030484466346, "grad_norm": 1.754859447479248, "learning_rate": 9.914929171817371e-05, "loss": 3.5339, "step": 34250 }, { "epoch": 0.30290185273494763, "grad_norm": 1.1092463731765747, "learning_rate": 9.914673735061191e-05, "loss": 3.7403, "step": 34300 }, { "epoch": 0.3033434006252318, "grad_norm": 2.3356130123138428, "learning_rate": 9.91441791868814e-05, "loss": 3.7242, "step": 34350 }, { "epoch": 0.303784948515516, "grad_norm": 1.9679780006408691, "learning_rate": 9.914161722717976e-05, "loss": 3.0423, "step": 34400 }, { "epoch": 0.3042264964058002, "grad_norm": 2.5788609981536865, "learning_rate": 9.91390514717049e-05, "loss": 3.3403, "step": 34450 }, { "epoch": 0.30466804429608435, "grad_norm": 5.141701698303223, "learning_rate": 9.913648192065503e-05, "loss": 3.0419, "step": 34500 }, { "epoch": 0.3051095921863685, "grad_norm": 0.9429007172584534, "learning_rate": 9.913390857422858e-05, "loss": 3.4227, "step": 34550 }, { "epoch": 0.30555114007665274, "grad_norm": 5.526501655578613, "learning_rate": 9.913133143262432e-05, "loss": 3.4853, "step": 34600 }, { "epoch": 0.3059926879669369, "grad_norm": 1.2435593605041504, "learning_rate": 9.912875049604135e-05, "loss": 3.1218, "step": 34650 }, { "epoch": 0.30643423585722107, "grad_norm": 3.752776861190796, "learning_rate": 
9.912616576467899e-05, "loss": 3.29, "step": 34700 }, { "epoch": 0.30687578374750524, "grad_norm": 1.3690961599349976, "learning_rate": 9.91235772387369e-05, "loss": 3.5259, "step": 34750 }, { "epoch": 0.30731733163778946, "grad_norm": 5.097490310668945, "learning_rate": 9.912098491841504e-05, "loss": 3.4285, "step": 34800 }, { "epoch": 0.3077588795280736, "grad_norm": 2.029536485671997, "learning_rate": 9.911838880391362e-05, "loss": 3.5238, "step": 34850 }, { "epoch": 0.3082004274183578, "grad_norm": 1.1511303186416626, "learning_rate": 9.911578889543318e-05, "loss": 3.5973, "step": 34900 }, { "epoch": 0.30864197530864196, "grad_norm": 0.7825906872749329, "learning_rate": 9.911318519317454e-05, "loss": 3.5027, "step": 34950 }, { "epoch": 0.3090835231989262, "grad_norm": 1.2858946323394775, "learning_rate": 9.91105776973388e-05, "loss": 3.5128, "step": 35000 }, { "epoch": 0.30952507108921035, "grad_norm": 3.028378486633301, "learning_rate": 9.910796640812739e-05, "loss": 3.2618, "step": 35050 }, { "epoch": 0.3099666189794945, "grad_norm": 2.016479253768921, "learning_rate": 9.910535132574198e-05, "loss": 3.4658, "step": 35100 }, { "epoch": 0.3104081668697787, "grad_norm": 5.971153736114502, "learning_rate": 9.91027324503846e-05, "loss": 3.5681, "step": 35150 }, { "epoch": 0.3108497147600629, "grad_norm": 4.901987552642822, "learning_rate": 9.910010978225753e-05, "loss": 3.2022, "step": 35200 }, { "epoch": 0.31129126265034707, "grad_norm": 1.5224626064300537, "learning_rate": 9.90974833215633e-05, "loss": 3.4693, "step": 35250 }, { "epoch": 0.31173281054063123, "grad_norm": 1.7592462301254272, "learning_rate": 9.909485306850483e-05, "loss": 3.2146, "step": 35300 }, { "epoch": 0.3121743584309154, "grad_norm": 2.2695250511169434, "learning_rate": 9.909221902328529e-05, "loss": 3.3325, "step": 35350 }, { "epoch": 0.3126159063211996, "grad_norm": 1.2258449792861938, "learning_rate": 9.908958118610811e-05, "loss": 3.4704, "step": 35400 }, { "epoch": 0.3130574542114838, 
"grad_norm": 5.258679389953613, "learning_rate": 9.908693955717705e-05, "loss": 3.579, "step": 35450 }, { "epoch": 0.31349900210176795, "grad_norm": 1.850501298904419, "learning_rate": 9.908429413669615e-05, "loss": 3.2378, "step": 35500 }, { "epoch": 0.3139405499920521, "grad_norm": 1.2289330959320068, "learning_rate": 9.908164492486976e-05, "loss": 3.4111, "step": 35550 }, { "epoch": 0.31438209788233634, "grad_norm": 2.0499982833862305, "learning_rate": 9.907899192190249e-05, "loss": 3.6974, "step": 35600 }, { "epoch": 0.3148236457726205, "grad_norm": 1.0805225372314453, "learning_rate": 9.907633512799927e-05, "loss": 3.1798, "step": 35650 }, { "epoch": 0.3152651936629047, "grad_norm": 1.2690644264221191, "learning_rate": 9.907367454336533e-05, "loss": 2.9274, "step": 35700 }, { "epoch": 0.31570674155318884, "grad_norm": 5.509250164031982, "learning_rate": 9.907101016820616e-05, "loss": 3.1606, "step": 35750 }, { "epoch": 0.31614828944347306, "grad_norm": 3.2590670585632324, "learning_rate": 9.906834200272756e-05, "loss": 3.2878, "step": 35800 }, { "epoch": 0.31658983733375723, "grad_norm": 1.7757644653320312, "learning_rate": 9.906567004713563e-05, "loss": 3.3347, "step": 35850 }, { "epoch": 0.3170313852240414, "grad_norm": 6.056863784790039, "learning_rate": 9.906299430163675e-05, "loss": 3.3504, "step": 35900 }, { "epoch": 0.31747293311432556, "grad_norm": 1.0027168989181519, "learning_rate": 9.90603147664376e-05, "loss": 3.0737, "step": 35950 }, { "epoch": 0.3179144810046098, "grad_norm": 1.916014313697815, "learning_rate": 9.905763144174517e-05, "loss": 3.1648, "step": 36000 }, { "epoch": 0.3179144810046098, "eval_asr_loss": 0.8892677519164152, "eval_loss": 3.0999526977539062, "eval_runtime": 20.2917, "eval_samples_per_second": 37.848, "eval_steps_per_second": 9.462, "eval_tts_loss": 5.944016528967177, "step": 36000 }, { "epoch": 0.31835602889489395, "grad_norm": 2.1346523761749268, "learning_rate": 9.90549443277667e-05, "loss": 3.5172, "step": 36050 }, { 
"epoch": 0.3187975767851781, "grad_norm": 1.5300114154815674, "learning_rate": 9.905225342470974e-05, "loss": 3.2909, "step": 36100 }, { "epoch": 0.3192391246754623, "grad_norm": 1.7766307592391968, "learning_rate": 9.904955873278216e-05, "loss": 3.5821, "step": 36150 }, { "epoch": 0.3196806725657465, "grad_norm": 1.8257805109024048, "learning_rate": 9.904686025219211e-05, "loss": 3.3543, "step": 36200 }, { "epoch": 0.32012222045603067, "grad_norm": 2.9343011379241943, "learning_rate": 9.9044157983148e-05, "loss": 3.5759, "step": 36250 }, { "epoch": 0.32056376834631484, "grad_norm": 2.4320614337921143, "learning_rate": 9.904145192585857e-05, "loss": 3.2795, "step": 36300 }, { "epoch": 0.321005316236599, "grad_norm": 2.0806117057800293, "learning_rate": 9.903874208053283e-05, "loss": 2.9637, "step": 36350 }, { "epoch": 0.3214468641268832, "grad_norm": 10.894238471984863, "learning_rate": 9.903602844738012e-05, "loss": 3.5964, "step": 36400 }, { "epoch": 0.3218884120171674, "grad_norm": 1.5613411664962769, "learning_rate": 9.903331102661001e-05, "loss": 3.2076, "step": 36450 }, { "epoch": 0.32232995990745156, "grad_norm": 3.5824601650238037, "learning_rate": 9.903058981843242e-05, "loss": 3.3151, "step": 36500 }, { "epoch": 0.3227715077977357, "grad_norm": 4.121826171875, "learning_rate": 9.902786482305753e-05, "loss": 3.1602, "step": 36550 }, { "epoch": 0.32321305568801995, "grad_norm": 2.1794791221618652, "learning_rate": 9.902513604069583e-05, "loss": 3.3487, "step": 36600 }, { "epoch": 0.3236546035783041, "grad_norm": 3.4854958057403564, "learning_rate": 9.902240347155809e-05, "loss": 3.5573, "step": 36650 }, { "epoch": 0.3240961514685883, "grad_norm": 2.346065044403076, "learning_rate": 9.901966711585538e-05, "loss": 2.7336, "step": 36700 }, { "epoch": 0.32453769935887244, "grad_norm": 1.1362333297729492, "learning_rate": 9.901692697379905e-05, "loss": 3.5597, "step": 36750 }, { "epoch": 0.32497924724915667, "grad_norm": 1.6578530073165894, "learning_rate": 
9.901418304560077e-05, "loss": 3.4961, "step": 36800 }, { "epoch": 0.32542079513944083, "grad_norm": 1.7271004915237427, "learning_rate": 9.901143533147248e-05, "loss": 3.1233, "step": 36850 }, { "epoch": 0.325862343029725, "grad_norm": 5.3198652267456055, "learning_rate": 9.900868383162643e-05, "loss": 3.6441, "step": 36900 }, { "epoch": 0.32630389092000917, "grad_norm": 1.4437021017074585, "learning_rate": 9.900592854627511e-05, "loss": 3.239, "step": 36950 }, { "epoch": 0.3267454388102934, "grad_norm": 3.09059739112854, "learning_rate": 9.900316947563138e-05, "loss": 3.3918, "step": 37000 }, { "epoch": 0.32718698670057755, "grad_norm": 1.7625781297683716, "learning_rate": 9.900040661990835e-05, "loss": 3.5584, "step": 37050 }, { "epoch": 0.3276285345908617, "grad_norm": 1.064771294593811, "learning_rate": 9.899763997931942e-05, "loss": 3.0205, "step": 37100 }, { "epoch": 0.3280700824811459, "grad_norm": 2.178464889526367, "learning_rate": 9.899486955407826e-05, "loss": 3.2899, "step": 37150 }, { "epoch": 0.3285116303714301, "grad_norm": 1.889468789100647, "learning_rate": 9.899209534439891e-05, "loss": 3.3759, "step": 37200 }, { "epoch": 0.3289531782617143, "grad_norm": 3.320777654647827, "learning_rate": 9.898931735049565e-05, "loss": 3.6612, "step": 37250 }, { "epoch": 0.32939472615199844, "grad_norm": 4.364492416381836, "learning_rate": 9.898653557258303e-05, "loss": 3.5972, "step": 37300 }, { "epoch": 0.3298362740422826, "grad_norm": 4.426956653594971, "learning_rate": 9.898375001087593e-05, "loss": 3.5658, "step": 37350 }, { "epoch": 0.33027782193256683, "grad_norm": 1.2732926607131958, "learning_rate": 9.898096066558951e-05, "loss": 3.1916, "step": 37400 }, { "epoch": 0.330719369822851, "grad_norm": 1.8885505199432373, "learning_rate": 9.897816753693922e-05, "loss": 3.6108, "step": 37450 }, { "epoch": 0.33116091771313516, "grad_norm": 5.467677116394043, "learning_rate": 9.897537062514081e-05, "loss": 3.079, "step": 37500 }, { "epoch": 0.3316024656034193, 
"grad_norm": 3.006726026535034, "learning_rate": 9.897256993041033e-05, "loss": 3.3902, "step": 37550 }, { "epoch": 0.33204401349370355, "grad_norm": 3.979788064956665, "learning_rate": 9.896976545296411e-05, "loss": 2.8985, "step": 37600 }, { "epoch": 0.3324855613839877, "grad_norm": 0.933592677116394, "learning_rate": 9.896695719301874e-05, "loss": 3.3717, "step": 37650 }, { "epoch": 0.3329271092742719, "grad_norm": 2.022643804550171, "learning_rate": 9.896414515079115e-05, "loss": 3.2212, "step": 37700 }, { "epoch": 0.33336865716455605, "grad_norm": 1.9231008291244507, "learning_rate": 9.896132932649857e-05, "loss": 3.0829, "step": 37750 }, { "epoch": 0.33381020505484027, "grad_norm": 6.672185897827148, "learning_rate": 9.895850972035847e-05, "loss": 3.0794, "step": 37800 }, { "epoch": 0.33425175294512444, "grad_norm": 2.566579580307007, "learning_rate": 9.895568633258865e-05, "loss": 3.2348, "step": 37850 }, { "epoch": 0.3346933008354086, "grad_norm": 3.8691163063049316, "learning_rate": 9.895285916340719e-05, "loss": 3.1823, "step": 37900 }, { "epoch": 0.33513484872569277, "grad_norm": 1.986522912979126, "learning_rate": 9.895002821303246e-05, "loss": 3.4586, "step": 37950 }, { "epoch": 0.335576396615977, "grad_norm": 1.734668254852295, "learning_rate": 9.894719348168315e-05, "loss": 3.7534, "step": 38000 }, { "epoch": 0.33601794450626116, "grad_norm": 1.9242331981658936, "learning_rate": 9.894435496957819e-05, "loss": 3.3395, "step": 38050 }, { "epoch": 0.3364594923965453, "grad_norm": 1.77113676071167, "learning_rate": 9.894151267693686e-05, "loss": 3.352, "step": 38100 }, { "epoch": 0.3369010402868295, "grad_norm": 0.799421489238739, "learning_rate": 9.893866660397867e-05, "loss": 3.7193, "step": 38150 }, { "epoch": 0.3373425881771137, "grad_norm": 1.236433982849121, "learning_rate": 9.89358167509235e-05, "loss": 3.5647, "step": 38200 }, { "epoch": 0.3377841360673979, "grad_norm": 3.1852049827575684, "learning_rate": 9.893296311799143e-05, "loss": 3.477, 
"step": 38250 }, { "epoch": 0.33822568395768204, "grad_norm": 2.2389414310455322, "learning_rate": 9.89301057054029e-05, "loss": 2.8368, "step": 38300 }, { "epoch": 0.3386672318479662, "grad_norm": 6.785423278808594, "learning_rate": 9.892724451337863e-05, "loss": 3.3759, "step": 38350 }, { "epoch": 0.33910877973825043, "grad_norm": 4.653458595275879, "learning_rate": 9.892437954213958e-05, "loss": 3.353, "step": 38400 }, { "epoch": 0.3395503276285346, "grad_norm": 3.1158320903778076, "learning_rate": 9.892151079190711e-05, "loss": 3.5522, "step": 38450 }, { "epoch": 0.33999187551881876, "grad_norm": 2.8249120712280273, "learning_rate": 9.891863826290277e-05, "loss": 3.3592, "step": 38500 }, { "epoch": 0.34043342340910293, "grad_norm": 2.011695384979248, "learning_rate": 9.891576195534842e-05, "loss": 3.3599, "step": 38550 }, { "epoch": 0.34087497129938715, "grad_norm": 1.8144574165344238, "learning_rate": 9.891288186946628e-05, "loss": 3.3011, "step": 38600 }, { "epoch": 0.3413165191896713, "grad_norm": 2.060272455215454, "learning_rate": 9.890999800547878e-05, "loss": 3.4336, "step": 38650 }, { "epoch": 0.3417580670799555, "grad_norm": 1.401672124862671, "learning_rate": 9.890711036360867e-05, "loss": 3.1762, "step": 38700 }, { "epoch": 0.34219961497023965, "grad_norm": 1.6355420351028442, "learning_rate": 9.8904218944079e-05, "loss": 3.6908, "step": 38750 }, { "epoch": 0.3426411628605239, "grad_norm": 2.591089963912964, "learning_rate": 9.890132374711312e-05, "loss": 3.3153, "step": 38800 }, { "epoch": 0.34308271075080804, "grad_norm": 2.8402106761932373, "learning_rate": 9.889842477293465e-05, "loss": 3.5123, "step": 38850 }, { "epoch": 0.3435242586410922, "grad_norm": 0.9350060820579529, "learning_rate": 9.889552202176751e-05, "loss": 3.3598, "step": 38900 }, { "epoch": 0.3439658065313764, "grad_norm": 2.9623401165008545, "learning_rate": 9.889261549383591e-05, "loss": 3.1874, "step": 38950 }, { "epoch": 0.3444073544216606, "grad_norm": 1.7671315670013428, 
"learning_rate": 9.888970518936438e-05, "loss": 3.5201, "step": 39000 }, { "epoch": 0.3444073544216606, "eval_asr_loss": 0.8851676637238344, "eval_loss": 3.074697494506836, "eval_runtime": 20.3913, "eval_samples_per_second": 37.663, "eval_steps_per_second": 9.416, "eval_tts_loss": 5.894229360095819, "step": 39000 }, { "epoch": 0.34484890231194476, "grad_norm": 1.1440892219543457, "learning_rate": 9.888679110857768e-05, "loss": 3.4325, "step": 39050 }, { "epoch": 0.3452904502022289, "grad_norm": 2.065777540206909, "learning_rate": 9.888387325170093e-05, "loss": 3.3255, "step": 39100 }, { "epoch": 0.3457319980925131, "grad_norm": 5.079590320587158, "learning_rate": 9.888095161895947e-05, "loss": 3.1736, "step": 39150 }, { "epoch": 0.3461735459827973, "grad_norm": 2.521078586578369, "learning_rate": 9.887802621057902e-05, "loss": 3.6232, "step": 39200 }, { "epoch": 0.3466150938730815, "grad_norm": 3.053846836090088, "learning_rate": 9.88750970267855e-05, "loss": 3.2633, "step": 39250 }, { "epoch": 0.34705664176336565, "grad_norm": 1.3491613864898682, "learning_rate": 9.88721640678052e-05, "loss": 3.7012, "step": 39300 }, { "epoch": 0.3474981896536498, "grad_norm": 1.5826070308685303, "learning_rate": 9.886922733386463e-05, "loss": 3.3255, "step": 39350 }, { "epoch": 0.34793973754393404, "grad_norm": 5.572771072387695, "learning_rate": 9.886628682519069e-05, "loss": 3.2976, "step": 39400 }, { "epoch": 0.3483812854342182, "grad_norm": 1.5839028358459473, "learning_rate": 9.886334254201041e-05, "loss": 3.3149, "step": 39450 }, { "epoch": 0.34882283332450237, "grad_norm": 3.295896053314209, "learning_rate": 9.88603944845513e-05, "loss": 2.9223, "step": 39500 }, { "epoch": 0.34926438121478653, "grad_norm": 2.2568423748016357, "learning_rate": 9.885744265304104e-05, "loss": 3.527, "step": 39550 }, { "epoch": 0.34970592910507076, "grad_norm": 0.9765694737434387, "learning_rate": 9.885448704770762e-05, "loss": 3.4622, "step": 39600 }, { "epoch": 0.3501474769953549, 
"grad_norm": 1.8776651620864868, "learning_rate": 9.885152766877936e-05, "loss": 3.327, "step": 39650 }, { "epoch": 0.3505890248856391, "grad_norm": 3.4537465572357178, "learning_rate": 9.884856451648483e-05, "loss": 3.5304, "step": 39700 }, { "epoch": 0.35103057277592326, "grad_norm": 2.3317794799804688, "learning_rate": 9.884559759105292e-05, "loss": 3.485, "step": 39750 }, { "epoch": 0.3514721206662075, "grad_norm": 2.5425727367401123, "learning_rate": 9.88426268927128e-05, "loss": 3.5912, "step": 39800 }, { "epoch": 0.35191366855649164, "grad_norm": 0.6102803945541382, "learning_rate": 9.883965242169392e-05, "loss": 3.0589, "step": 39850 }, { "epoch": 0.3523552164467758, "grad_norm": 2.142951726913452, "learning_rate": 9.883667417822604e-05, "loss": 3.4354, "step": 39900 }, { "epoch": 0.35279676433706, "grad_norm": 0.8856062293052673, "learning_rate": 9.883369216253919e-05, "loss": 3.3475, "step": 39950 }, { "epoch": 0.3532383122273442, "grad_norm": 1.8055260181427002, "learning_rate": 9.883070637486374e-05, "loss": 3.2718, "step": 40000 }, { "epoch": 0.35367986011762836, "grad_norm": 1.1027382612228394, "learning_rate": 9.882777664358069e-05, "loss": 3.1336, "step": 40050 }, { "epoch": 0.35412140800791253, "grad_norm": 5.140567779541016, "learning_rate": 9.882478338804843e-05, "loss": 3.3178, "step": 40100 }, { "epoch": 0.3545629558981967, "grad_norm": 2.794602155685425, "learning_rate": 9.882178636121568e-05, "loss": 3.8837, "step": 40150 }, { "epoch": 0.3550045037884809, "grad_norm": 2.1202375888824463, "learning_rate": 9.881878556331392e-05, "loss": 3.5493, "step": 40200 }, { "epoch": 0.3554460516787651, "grad_norm": 0.8689738512039185, "learning_rate": 9.881578099457496e-05, "loss": 3.29, "step": 40250 }, { "epoch": 0.35588759956904925, "grad_norm": 2.1677334308624268, "learning_rate": 9.881277265523085e-05, "loss": 3.2762, "step": 40300 }, { "epoch": 0.3563291474593334, "grad_norm": 1.659857988357544, "learning_rate": 9.880976054551399e-05, "loss": 
3.8149, "step": 40350 }, { "epoch": 0.35677069534961764, "grad_norm": 2.109435558319092, "learning_rate": 9.880674466565701e-05, "loss": 3.3179, "step": 40400 }, { "epoch": 0.3572122432399018, "grad_norm": 2.959153175354004, "learning_rate": 9.880372501589289e-05, "loss": 3.5068, "step": 40450 }, { "epoch": 0.35765379113018597, "grad_norm": 0.8862214088439941, "learning_rate": 9.880070159645483e-05, "loss": 3.593, "step": 40500 }, { "epoch": 0.35809533902047014, "grad_norm": 4.1659464836120605, "learning_rate": 9.879767440757641e-05, "loss": 3.1608, "step": 40550 }, { "epoch": 0.35853688691075436, "grad_norm": 4.7671799659729, "learning_rate": 9.879464344949143e-05, "loss": 3.3211, "step": 40600 }, { "epoch": 0.3589784348010385, "grad_norm": 5.715523719787598, "learning_rate": 9.8791608722434e-05, "loss": 3.0981, "step": 40650 }, { "epoch": 0.3594199826913227, "grad_norm": 1.8158924579620361, "learning_rate": 9.878857022663855e-05, "loss": 2.8897, "step": 40700 }, { "epoch": 0.35986153058160686, "grad_norm": 2.091034412384033, "learning_rate": 9.878552796233976e-05, "loss": 3.6337, "step": 40750 }, { "epoch": 0.3603030784718911, "grad_norm": 3.8585503101348877, "learning_rate": 9.878248192977262e-05, "loss": 3.5439, "step": 40800 }, { "epoch": 0.36074462636217525, "grad_norm": 2.2025511264801025, "learning_rate": 9.877943212917242e-05, "loss": 3.3184, "step": 40850 }, { "epoch": 0.3611861742524594, "grad_norm": 3.991384744644165, "learning_rate": 9.877637856077473e-05, "loss": 3.4019, "step": 40900 }, { "epoch": 0.3616277221427436, "grad_norm": 4.2655534744262695, "learning_rate": 9.87733212248154e-05, "loss": 2.82, "step": 40950 }, { "epoch": 0.3620692700330278, "grad_norm": 1.683498740196228, "learning_rate": 9.877026012153059e-05, "loss": 3.3487, "step": 41000 }, { "epoch": 0.36251081792331197, "grad_norm": 2.264552593231201, "learning_rate": 9.876719525115675e-05, "loss": 2.975, "step": 41050 }, { "epoch": 0.36295236581359613, "grad_norm": 2.6153547763824463, 
"learning_rate": 9.876412661393061e-05, "loss": 3.3764, "step": 41100 }, { "epoch": 0.3633939137038803, "grad_norm": 1.591480016708374, "learning_rate": 9.876105421008919e-05, "loss": 2.965, "step": 41150 }, { "epoch": 0.3638354615941645, "grad_norm": 2.9769952297210693, "learning_rate": 9.875797803986983e-05, "loss": 3.2887, "step": 41200 }, { "epoch": 0.3642770094844487, "grad_norm": 5.074538230895996, "learning_rate": 9.875489810351011e-05, "loss": 3.5681, "step": 41250 }, { "epoch": 0.36471855737473285, "grad_norm": 2.3003244400024414, "learning_rate": 9.875181440124794e-05, "loss": 3.443, "step": 41300 }, { "epoch": 0.365160105265017, "grad_norm": 2.3685524463653564, "learning_rate": 9.874872693332152e-05, "loss": 3.1724, "step": 41350 }, { "epoch": 0.36560165315530124, "grad_norm": 0.6377138495445251, "learning_rate": 9.874563569996931e-05, "loss": 3.4932, "step": 41400 }, { "epoch": 0.3660432010455854, "grad_norm": 1.721374750137329, "learning_rate": 9.874254070143009e-05, "loss": 3.2422, "step": 41450 }, { "epoch": 0.3664847489358696, "grad_norm": 2.2348365783691406, "learning_rate": 9.873944193794294e-05, "loss": 3.0912, "step": 41500 }, { "epoch": 0.36692629682615374, "grad_norm": 1.8013442754745483, "learning_rate": 9.873633940974719e-05, "loss": 3.4616, "step": 41550 }, { "epoch": 0.36736784471643796, "grad_norm": 1.4249016046524048, "learning_rate": 9.873323311708249e-05, "loss": 3.2594, "step": 41600 }, { "epoch": 0.36780939260672213, "grad_norm": 1.336929440498352, "learning_rate": 9.873012306018879e-05, "loss": 3.4553, "step": 41650 }, { "epoch": 0.3682509404970063, "grad_norm": 1.3156746625900269, "learning_rate": 9.872700923930629e-05, "loss": 3.5638, "step": 41700 }, { "epoch": 0.36869248838729046, "grad_norm": 1.1224370002746582, "learning_rate": 9.872389165467553e-05, "loss": 3.131, "step": 41750 }, { "epoch": 0.3691340362775747, "grad_norm": 1.9698013067245483, "learning_rate": 9.87207703065373e-05, "loss": 3.2832, "step": 41800 }, { "epoch": 
0.36957558416785885, "grad_norm": 1.916576623916626, "learning_rate": 9.87176451951327e-05, "loss": 3.8032, "step": 41850 }, { "epoch": 0.370017132058143, "grad_norm": 1.1512713432312012, "learning_rate": 9.871451632070312e-05, "loss": 3.4968, "step": 41900 }, { "epoch": 0.3704586799484272, "grad_norm": 1.165654182434082, "learning_rate": 9.871138368349025e-05, "loss": 3.7091, "step": 41950 }, { "epoch": 0.3709002278387114, "grad_norm": 0.8057221174240112, "learning_rate": 9.870824728373604e-05, "loss": 2.845, "step": 42000 }, { "epoch": 0.3709002278387114, "eval_asr_loss": 0.8905537465911811, "eval_loss": 3.063842535018921, "eval_runtime": 20.3425, "eval_samples_per_second": 37.754, "eval_steps_per_second": 9.438, "eval_tts_loss": 5.883050221472156, "step": 42000 }, { "epoch": 0.37134177572899557, "grad_norm": 9.080826759338379, "learning_rate": 9.870510712168278e-05, "loss": 3.178, "step": 42050 }, { "epoch": 0.37178332361927974, "grad_norm": 1.3995147943496704, "learning_rate": 9.870196319757299e-05, "loss": 3.316, "step": 42100 }, { "epoch": 0.3722248715095639, "grad_norm": 2.64806866645813, "learning_rate": 9.869887850223219e-05, "loss": 3.0827, "step": 42150 }, { "epoch": 0.3726664193998481, "grad_norm": 0.9530137181282043, "learning_rate": 9.869572712996722e-05, "loss": 3.2979, "step": 42200 }, { "epoch": 0.3731079672901323, "grad_norm": 2.155907392501831, "learning_rate": 9.869257199637023e-05, "loss": 3.2503, "step": 42250 }, { "epoch": 0.37354951518041646, "grad_norm": 1.780704379081726, "learning_rate": 9.868941310168498e-05, "loss": 3.4232, "step": 42300 }, { "epoch": 0.3739910630707006, "grad_norm": 0.6130134463310242, "learning_rate": 9.868625044615543e-05, "loss": 3.2393, "step": 42350 }, { "epoch": 0.37443261096098485, "grad_norm": 1.8535226583480835, "learning_rate": 9.868308403002589e-05, "loss": 3.2783, "step": 42400 }, { "epoch": 0.374874158851269, "grad_norm": 3.596348285675049, "learning_rate": 9.867991385354092e-05, "loss": 3.157, "step": 
42450 }, { "epoch": 0.3753157067415532, "grad_norm": 8.728959083557129, "learning_rate": 9.86767399169454e-05, "loss": 3.542, "step": 42500 }, { "epoch": 0.37575725463183735, "grad_norm": 1.9934968948364258, "learning_rate": 9.867356222048448e-05, "loss": 3.4873, "step": 42550 }, { "epoch": 0.37619880252212157, "grad_norm": 2.618511915206909, "learning_rate": 9.867038076440362e-05, "loss": 3.1409, "step": 42600 }, { "epoch": 0.37664035041240573, "grad_norm": 1.344989538192749, "learning_rate": 9.866719554894857e-05, "loss": 3.4496, "step": 42650 }, { "epoch": 0.3770818983026899, "grad_norm": 1.429974913597107, "learning_rate": 9.866400657436534e-05, "loss": 3.1867, "step": 42700 }, { "epoch": 0.37752344619297407, "grad_norm": 1.021186351776123, "learning_rate": 9.866081384090026e-05, "loss": 2.994, "step": 42750 }, { "epoch": 0.3779649940832583, "grad_norm": 2.500567674636841, "learning_rate": 9.865761734879994e-05, "loss": 2.9304, "step": 42800 }, { "epoch": 0.37840654197354245, "grad_norm": 1.401653528213501, "learning_rate": 9.865441709831129e-05, "loss": 3.5495, "step": 42850 }, { "epoch": 0.3788480898638266, "grad_norm": 2.189068555831909, "learning_rate": 9.865121308968151e-05, "loss": 3.5045, "step": 42900 }, { "epoch": 0.3792896377541108, "grad_norm": 1.583028793334961, "learning_rate": 9.864800532315806e-05, "loss": 3.4075, "step": 42950 }, { "epoch": 0.379731185644395, "grad_norm": 1.778917908668518, "learning_rate": 9.864479379898871e-05, "loss": 3.3313, "step": 43000 }, { "epoch": 0.3801727335346792, "grad_norm": 2.908151626586914, "learning_rate": 9.864157851742154e-05, "loss": 3.1747, "step": 43050 }, { "epoch": 0.38061428142496334, "grad_norm": 3.3366377353668213, "learning_rate": 9.86383594787049e-05, "loss": 3.5529, "step": 43100 }, { "epoch": 0.3810558293152475, "grad_norm": 1.980222225189209, "learning_rate": 9.863513668308744e-05, "loss": 3.559, "step": 43150 }, { "epoch": 0.38149737720553173, "grad_norm": 2.1785378456115723, "learning_rate": 
9.863191013081807e-05, "loss": 3.3076, "step": 43200 }, { "epoch": 0.3819389250958159, "grad_norm": 3.69520902633667, "learning_rate": 9.862867982214604e-05, "loss": 3.4053, "step": 43250 }, { "epoch": 0.38238047298610006, "grad_norm": 5.211033821105957, "learning_rate": 9.862544575732085e-05, "loss": 3.2504, "step": 43300 }, { "epoch": 0.38282202087638423, "grad_norm": 2.920567750930786, "learning_rate": 9.862220793659232e-05, "loss": 3.1881, "step": 43350 }, { "epoch": 0.38326356876666845, "grad_norm": 2.8354828357696533, "learning_rate": 9.86189663602105e-05, "loss": 3.6872, "step": 43400 }, { "epoch": 0.3837051166569526, "grad_norm": 3.6911203861236572, "learning_rate": 9.861572102842583e-05, "loss": 3.3914, "step": 43450 }, { "epoch": 0.3841466645472368, "grad_norm": 3.9869236946105957, "learning_rate": 9.861247194148895e-05, "loss": 3.3991, "step": 43500 }, { "epoch": 0.38458821243752095, "grad_norm": 2.9851901531219482, "learning_rate": 9.860928419328401e-05, "loss": 3.4688, "step": 43550 }, { "epoch": 0.38502976032780517, "grad_norm": 5.538595199584961, "learning_rate": 9.860602767188645e-05, "loss": 3.3237, "step": 43600 }, { "epoch": 0.38547130821808934, "grad_norm": 0.9150497317314148, "learning_rate": 9.860276739608543e-05, "loss": 3.2588, "step": 43650 }, { "epoch": 0.3859128561083735, "grad_norm": 3.647045612335205, "learning_rate": 9.859950336613275e-05, "loss": 3.1377, "step": 43700 }, { "epoch": 0.38635440399865767, "grad_norm": 0.6097536087036133, "learning_rate": 9.859623558228055e-05, "loss": 3.1102, "step": 43750 }, { "epoch": 0.3867959518889419, "grad_norm": 1.7617607116699219, "learning_rate": 9.859296404478124e-05, "loss": 2.9313, "step": 43800 }, { "epoch": 0.38723749977922606, "grad_norm": 2.4368743896484375, "learning_rate": 9.858968875388752e-05, "loss": 3.2671, "step": 43850 }, { "epoch": 0.3876790476695102, "grad_norm": 1.931584358215332, "learning_rate": 9.858640970985237e-05, "loss": 3.4481, "step": 43900 }, { "epoch": 
0.3881205955597944, "grad_norm": 1.9381108283996582, "learning_rate": 9.858312691292906e-05, "loss": 3.2431, "step": 43950 }, { "epoch": 0.3885621434500786, "grad_norm": 2.4838809967041016, "learning_rate": 9.857984036337118e-05, "loss": 3.4532, "step": 44000 }, { "epoch": 0.3890036913403628, "grad_norm": 2.848560094833374, "learning_rate": 9.857655006143259e-05, "loss": 3.3183, "step": 44050 }, { "epoch": 0.38944523923064694, "grad_norm": 2.6281092166900635, "learning_rate": 9.85732560073674e-05, "loss": 3.348, "step": 44100 }, { "epoch": 0.3898867871209311, "grad_norm": 1.351362943649292, "learning_rate": 9.856995820143006e-05, "loss": 3.2875, "step": 44150 }, { "epoch": 0.39032833501121533, "grad_norm": 11.539451599121094, "learning_rate": 9.856665664387535e-05, "loss": 3.8226, "step": 44200 }, { "epoch": 0.3907698829014995, "grad_norm": 2.561694383621216, "learning_rate": 9.856335133495822e-05, "loss": 3.7136, "step": 44250 }, { "epoch": 0.39121143079178367, "grad_norm": 1.2988532781600952, "learning_rate": 9.856004227493402e-05, "loss": 3.5633, "step": 44300 }, { "epoch": 0.39165297868206783, "grad_norm": 2.9327540397644043, "learning_rate": 9.855672946405831e-05, "loss": 3.2624, "step": 44350 }, { "epoch": 0.39209452657235205, "grad_norm": 2.4923012256622314, "learning_rate": 9.855341290258703e-05, "loss": 3.0418, "step": 44400 }, { "epoch": 0.3925360744626362, "grad_norm": 0.7568151354789734, "learning_rate": 9.85500925907763e-05, "loss": 3.5547, "step": 44450 }, { "epoch": 0.3929776223529204, "grad_norm": 1.1965677738189697, "learning_rate": 9.854676852888262e-05, "loss": 3.4921, "step": 44500 }, { "epoch": 0.39341917024320455, "grad_norm": 0.5616858601570129, "learning_rate": 9.854344071716272e-05, "loss": 3.2839, "step": 44550 }, { "epoch": 0.3938607181334888, "grad_norm": 3.225074052810669, "learning_rate": 9.854010915587369e-05, "loss": 3.4744, "step": 44600 }, { "epoch": 0.39430226602377294, "grad_norm": 3.9845869541168213, "learning_rate": 
9.853677384527282e-05, "loss": 3.4246, "step": 44650 }, { "epoch": 0.3947438139140571, "grad_norm": 3.029383897781372, "learning_rate": 9.853343478561773e-05, "loss": 3.408, "step": 44700 }, { "epoch": 0.3951853618043413, "grad_norm": 1.8023637533187866, "learning_rate": 9.853009197716638e-05, "loss": 3.1036, "step": 44750 }, { "epoch": 0.3956269096946255, "grad_norm": 0.8640767931938171, "learning_rate": 9.852674542017694e-05, "loss": 3.3368, "step": 44800 }, { "epoch": 0.39606845758490966, "grad_norm": 0.6954994797706604, "learning_rate": 9.852339511490792e-05, "loss": 2.9964, "step": 44850 }, { "epoch": 0.3965100054751938, "grad_norm": 8.39139461517334, "learning_rate": 9.85200410616181e-05, "loss": 2.9249, "step": 44900 }, { "epoch": 0.396951553365478, "grad_norm": 4.7654900550842285, "learning_rate": 9.851668326056652e-05, "loss": 3.0992, "step": 44950 }, { "epoch": 0.3973931012557622, "grad_norm": 1.2103265523910522, "learning_rate": 9.851332171201258e-05, "loss": 3.3672, "step": 45000 }, { "epoch": 0.3973931012557622, "eval_asr_loss": 0.8913320451830068, "eval_loss": 3.0676581859588623, "eval_runtime": 20.2524, "eval_samples_per_second": 37.921, "eval_steps_per_second": 9.48, "eval_tts_loss": 5.97925205245277, "step": 45000 }, { "epoch": 0.3978346491460464, "grad_norm": 2.4184420108795166, "learning_rate": 9.850995641621592e-05, "loss": 3.6283, "step": 45050 }, { "epoch": 0.39827619703633055, "grad_norm": 1.4427285194396973, "learning_rate": 9.850658737343648e-05, "loss": 3.5224, "step": 45100 }, { "epoch": 0.3987177449266147, "grad_norm": 6.392005920410156, "learning_rate": 9.850321458393447e-05, "loss": 3.0993, "step": 45150 }, { "epoch": 0.39915929281689894, "grad_norm": 1.4388530254364014, "learning_rate": 9.849983804797046e-05, "loss": 3.0099, "step": 45200 }, { "epoch": 0.3996008407071831, "grad_norm": 2.679241180419922, "learning_rate": 9.849645776580522e-05, "loss": 3.3363, "step": 45250 }, { "epoch": 0.40004238859746727, "grad_norm": 
1.61045503616333, "learning_rate": 9.849307373769985e-05, "loss": 3.1612, "step": 45300 }, { "epoch": 0.40048393648775144, "grad_norm": 1.9430288076400757, "learning_rate": 9.848968596391574e-05, "loss": 3.4328, "step": 45350 }, { "epoch": 0.40092548437803566, "grad_norm": 1.6506637334823608, "learning_rate": 9.848629444471456e-05, "loss": 3.6278, "step": 45400 }, { "epoch": 0.4013670322683198, "grad_norm": 2.5751986503601074, "learning_rate": 9.848289918035831e-05, "loss": 3.3468, "step": 45450 }, { "epoch": 0.401808580158604, "grad_norm": 2.1925528049468994, "learning_rate": 9.84795001711092e-05, "loss": 3.3276, "step": 45500 }, { "epoch": 0.40225012804888816, "grad_norm": 3.269740581512451, "learning_rate": 9.847609741722981e-05, "loss": 2.7999, "step": 45550 }, { "epoch": 0.4026916759391724, "grad_norm": 4.853845596313477, "learning_rate": 9.847269091898294e-05, "loss": 3.1533, "step": 45600 }, { "epoch": 0.40313322382945654, "grad_norm": 2.3576786518096924, "learning_rate": 9.846928067663175e-05, "loss": 3.4268, "step": 45650 }, { "epoch": 0.4035747717197407, "grad_norm": 1.3526619672775269, "learning_rate": 9.846586669043964e-05, "loss": 3.156, "step": 45700 }, { "epoch": 0.4040163196100249, "grad_norm": 1.5202453136444092, "learning_rate": 9.846244896067031e-05, "loss": 3.2914, "step": 45750 }, { "epoch": 0.4044578675003091, "grad_norm": 1.80320143699646, "learning_rate": 9.845902748758773e-05, "loss": 3.2984, "step": 45800 }, { "epoch": 0.40489941539059326, "grad_norm": 1.7634551525115967, "learning_rate": 9.845560227145621e-05, "loss": 3.1742, "step": 45850 }, { "epoch": 0.40534096328087743, "grad_norm": 2.447131633758545, "learning_rate": 9.84521733125403e-05, "loss": 3.0532, "step": 45900 }, { "epoch": 0.4057825111711616, "grad_norm": 2.7054214477539062, "learning_rate": 9.844874061110485e-05, "loss": 3.5135, "step": 45950 }, { "epoch": 0.4062240590614458, "grad_norm": 0.9877326488494873, "learning_rate": 9.844530416741505e-05, "loss": 3.4712, "step": 
46000 }, { "epoch": 0.40666560695173, "grad_norm": 1.2479236125946045, "learning_rate": 9.84418639817363e-05, "loss": 3.5099, "step": 46050 }, { "epoch": 0.40710715484201415, "grad_norm": 3.176802635192871, "learning_rate": 9.843842005433431e-05, "loss": 3.5577, "step": 46100 }, { "epoch": 0.4075487027322983, "grad_norm": 1.3120348453521729, "learning_rate": 9.843497238547513e-05, "loss": 2.9955, "step": 46150 }, { "epoch": 0.40799025062258254, "grad_norm": 1.1844558715820312, "learning_rate": 9.843152097542505e-05, "loss": 3.6275, "step": 46200 }, { "epoch": 0.4084317985128667, "grad_norm": 2.701456308364868, "learning_rate": 9.842806582445067e-05, "loss": 3.8021, "step": 46250 }, { "epoch": 0.4088733464031509, "grad_norm": 2.6151137351989746, "learning_rate": 9.842460693281885e-05, "loss": 3.6685, "step": 46300 }, { "epoch": 0.40931489429343504, "grad_norm": 2.728315591812134, "learning_rate": 9.842114430079679e-05, "loss": 3.4043, "step": 46350 }, { "epoch": 0.40975644218371926, "grad_norm": 2.2898852825164795, "learning_rate": 9.841767792865191e-05, "loss": 3.6381, "step": 46400 }, { "epoch": 0.4101979900740034, "grad_norm": 1.9836546182632446, "learning_rate": 9.8414207816652e-05, "loss": 3.0714, "step": 46450 }, { "epoch": 0.4106395379642876, "grad_norm": 2.8951032161712646, "learning_rate": 9.841073396506506e-05, "loss": 3.4733, "step": 46500 }, { "epoch": 0.41108108585457176, "grad_norm": 1.4405927658081055, "learning_rate": 9.840725637415945e-05, "loss": 3.2872, "step": 46550 }, { "epoch": 0.411522633744856, "grad_norm": 3.1269359588623047, "learning_rate": 9.840377504420376e-05, "loss": 3.4911, "step": 46600 }, { "epoch": 0.41196418163514015, "grad_norm": 3.698716163635254, "learning_rate": 9.840028997546689e-05, "loss": 3.1268, "step": 46650 }, { "epoch": 0.4124057295254243, "grad_norm": 2.955763578414917, "learning_rate": 9.839680116821804e-05, "loss": 3.5459, "step": 46700 }, { "epoch": 0.4128472774157085, "grad_norm": 0.848429799079895, 
"learning_rate": 9.83933086227267e-05, "loss": 3.2194, "step": 46750 }, { "epoch": 0.4132888253059927, "grad_norm": 8.197107315063477, "learning_rate": 9.838981233926264e-05, "loss": 3.2381, "step": 46800 }, { "epoch": 0.41373037319627687, "grad_norm": 1.4284354448318481, "learning_rate": 9.83863123180959e-05, "loss": 3.4783, "step": 46850 }, { "epoch": 0.41417192108656103, "grad_norm": 1.3862097263336182, "learning_rate": 9.838280855949684e-05, "loss": 3.3749, "step": 46900 }, { "epoch": 0.41461346897684526, "grad_norm": 1.5118871927261353, "learning_rate": 9.837930106373609e-05, "loss": 3.2749, "step": 46950 }, { "epoch": 0.4150550168671294, "grad_norm": 4.723493576049805, "learning_rate": 9.83757898310846e-05, "loss": 2.9933, "step": 47000 }, { "epoch": 0.4154965647574136, "grad_norm": 6.659562110900879, "learning_rate": 9.837227486181353e-05, "loss": 3.4323, "step": 47050 }, { "epoch": 0.41593811264769776, "grad_norm": 2.5123307704925537, "learning_rate": 9.836875615619443e-05, "loss": 2.9715, "step": 47100 }, { "epoch": 0.416379660537982, "grad_norm": 2.6539173126220703, "learning_rate": 9.836523371449906e-05, "loss": 3.1207, "step": 47150 }, { "epoch": 0.41682120842826614, "grad_norm": 3.233375072479248, "learning_rate": 9.836170753699952e-05, "loss": 3.1869, "step": 47200 }, { "epoch": 0.4172627563185503, "grad_norm": 1.2898887395858765, "learning_rate": 9.835817762396816e-05, "loss": 3.2155, "step": 47250 }, { "epoch": 0.4177043042088345, "grad_norm": 0.9073876738548279, "learning_rate": 9.835464397567765e-05, "loss": 3.363, "step": 47300 }, { "epoch": 0.4181458520991187, "grad_norm": 2.146453380584717, "learning_rate": 9.835110659240092e-05, "loss": 3.6663, "step": 47350 }, { "epoch": 0.41858739998940286, "grad_norm": 4.763782501220703, "learning_rate": 9.834756547441122e-05, "loss": 3.5419, "step": 47400 }, { "epoch": 0.41902894787968703, "grad_norm": 2.6837880611419678, "learning_rate": 9.834402062198207e-05, "loss": 3.134, "step": 47450 }, { "epoch": 
0.4194704957699712, "grad_norm": 2.5617268085479736, "learning_rate": 9.834047203538726e-05, "loss": 3.4525, "step": 47500 }, { "epoch": 0.4199120436602554, "grad_norm": 2.1954944133758545, "learning_rate": 9.83369197149009e-05, "loss": 3.7715, "step": 47550 }, { "epoch": 0.4203535915505396, "grad_norm": 1.5438072681427002, "learning_rate": 9.833336366079737e-05, "loss": 3.6781, "step": 47600 }, { "epoch": 0.42079513944082375, "grad_norm": 1.757779598236084, "learning_rate": 9.832980387335138e-05, "loss": 3.4219, "step": 47650 }, { "epoch": 0.4212366873311079, "grad_norm": 2.8832337856292725, "learning_rate": 9.832624035283785e-05, "loss": 2.9347, "step": 47700 }, { "epoch": 0.42167823522139214, "grad_norm": 2.0708539485931396, "learning_rate": 9.832274448117773e-05, "loss": 3.1908, "step": 47750 }, { "epoch": 0.4221197831116763, "grad_norm": 1.883726716041565, "learning_rate": 9.831917357000283e-05, "loss": 3.5663, "step": 47800 }, { "epoch": 0.42256133100196047, "grad_norm": 1.2137542963027954, "learning_rate": 9.83155989265815e-05, "loss": 3.3599, "step": 47850 }, { "epoch": 0.42300287889224464, "grad_norm": 1.1021403074264526, "learning_rate": 9.831202055118986e-05, "loss": 3.3176, "step": 47900 }, { "epoch": 0.42344442678252886, "grad_norm": 3.1852636337280273, "learning_rate": 9.830843844410431e-05, "loss": 3.2934, "step": 47950 }, { "epoch": 0.423885974672813, "grad_norm": 2.2362117767333984, "learning_rate": 9.830492435893767e-05, "loss": 3.5098, "step": 48000 }, { "epoch": 0.423885974672813, "eval_asr_loss": 0.8946412356905893, "eval_loss": 3.0450656414031982, "eval_runtime": 20.5825, "eval_samples_per_second": 37.313, "eval_steps_per_second": 9.328, "eval_tts_loss": 5.9677758015827616, "step": 48000 }, { "epoch": 0.4243275225630972, "grad_norm": 0.6300576329231262, "learning_rate": 9.830133486391473e-05, "loss": 3.5818, "step": 48050 }, { "epoch": 0.42476907045338136, "grad_norm": 1.3167023658752441, "learning_rate": 9.829774163802325e-05, "loss": 3.4292, 
"step": 48100 }, { "epoch": 0.4252106183436656, "grad_norm": 2.811063051223755, "learning_rate": 9.829414468154079e-05, "loss": 3.7977, "step": 48150 }, { "epoch": 0.42565216623394975, "grad_norm": 2.503530502319336, "learning_rate": 9.829054399474516e-05, "loss": 3.6608, "step": 48200 }, { "epoch": 0.4260937141242339, "grad_norm": 2.051328420639038, "learning_rate": 9.828693957791452e-05, "loss": 3.3447, "step": 48250 }, { "epoch": 0.4265352620145181, "grad_norm": 0.9550831913948059, "learning_rate": 9.828333143132724e-05, "loss": 3.2968, "step": 48300 }, { "epoch": 0.4269768099048023, "grad_norm": 3.1451058387756348, "learning_rate": 9.827971955526205e-05, "loss": 3.3196, "step": 48350 }, { "epoch": 0.42741835779508647, "grad_norm": 1.7226669788360596, "learning_rate": 9.827610394999791e-05, "loss": 3.1922, "step": 48400 }, { "epoch": 0.42785990568537063, "grad_norm": 2.289461851119995, "learning_rate": 9.827248461581412e-05, "loss": 3.327, "step": 48450 }, { "epoch": 0.4283014535756548, "grad_norm": 3.5129716396331787, "learning_rate": 9.826886155299023e-05, "loss": 3.4937, "step": 48500 }, { "epoch": 0.428743001465939, "grad_norm": 2.9753642082214355, "learning_rate": 9.826523476180609e-05, "loss": 3.3748, "step": 48550 }, { "epoch": 0.4291845493562232, "grad_norm": 1.0942645072937012, "learning_rate": 9.826160424254185e-05, "loss": 3.391, "step": 48600 }, { "epoch": 0.42962609724650735, "grad_norm": 2.1059162616729736, "learning_rate": 9.825796999547792e-05, "loss": 3.7639, "step": 48650 }, { "epoch": 0.4300676451367915, "grad_norm": 1.9335929155349731, "learning_rate": 9.825433202089503e-05, "loss": 2.9592, "step": 48700 }, { "epoch": 0.43050919302707574, "grad_norm": 0.7627319693565369, "learning_rate": 9.825069031907416e-05, "loss": 3.3292, "step": 48750 }, { "epoch": 0.4309507409173599, "grad_norm": 3.433350086212158, "learning_rate": 9.824704489029664e-05, "loss": 3.3546, "step": 48800 }, { "epoch": 0.4313922888076441, "grad_norm": 2.693155527114868, 
"learning_rate": 9.824339573484402e-05, "loss": 3.4344, "step": 48850 }, { "epoch": 0.43183383669792824, "grad_norm": 1.1923327445983887, "learning_rate": 9.823974285299817e-05, "loss": 3.3486, "step": 48900 }, { "epoch": 0.43227538458821246, "grad_norm": 1.9175432920455933, "learning_rate": 9.823608624504123e-05, "loss": 3.4314, "step": 48950 }, { "epoch": 0.43271693247849663, "grad_norm": 0.7622149586677551, "learning_rate": 9.823242591125568e-05, "loss": 3.1681, "step": 49000 }, { "epoch": 0.4331584803687808, "grad_norm": 2.3033721446990967, "learning_rate": 9.822876185192421e-05, "loss": 3.4592, "step": 49050 }, { "epoch": 0.43360002825906496, "grad_norm": 0.886083722114563, "learning_rate": 9.822509406732987e-05, "loss": 3.6985, "step": 49100 }, { "epoch": 0.4340415761493492, "grad_norm": 6.617265701293945, "learning_rate": 9.822142255775594e-05, "loss": 3.1759, "step": 49150 }, { "epoch": 0.43448312403963335, "grad_norm": 4.3931121826171875, "learning_rate": 9.821774732348602e-05, "loss": 3.4327, "step": 49200 }, { "epoch": 0.4349246719299175, "grad_norm": 1.2443506717681885, "learning_rate": 9.8214068364804e-05, "loss": 3.5825, "step": 49250 }, { "epoch": 0.4353662198202017, "grad_norm": 2.791949510574341, "learning_rate": 9.821038568199403e-05, "loss": 3.1509, "step": 49300 }, { "epoch": 0.4358077677104859, "grad_norm": 1.996881127357483, "learning_rate": 9.820669927534061e-05, "loss": 2.9644, "step": 49350 }, { "epoch": 0.43624931560077007, "grad_norm": 1.907235026359558, "learning_rate": 9.820300914512842e-05, "loss": 3.3867, "step": 49400 }, { "epoch": 0.43669086349105424, "grad_norm": 3.5670182704925537, "learning_rate": 9.819931529164254e-05, "loss": 3.4446, "step": 49450 }, { "epoch": 0.4371324113813384, "grad_norm": 1.6872813701629639, "learning_rate": 9.819561771516826e-05, "loss": 2.9724, "step": 49500 }, { "epoch": 0.4375739592716226, "grad_norm": 3.4050118923187256, "learning_rate": 9.819191641599121e-05, "loss": 3.2879, "step": 49550 }, { 
"epoch": 0.4380155071619068, "grad_norm": 2.1053950786590576, "learning_rate": 9.818821139439727e-05, "loss": 3.4533, "step": 49600 }, { "epoch": 0.43845705505219096, "grad_norm": 1.7744102478027344, "learning_rate": 9.818450265067261e-05, "loss": 3.3481, "step": 49650 }, { "epoch": 0.4388986029424751, "grad_norm": 2.2178120613098145, "learning_rate": 9.818079018510374e-05, "loss": 3.3662, "step": 49700 }, { "epoch": 0.43934015083275935, "grad_norm": 2.2515435218811035, "learning_rate": 9.817707399797736e-05, "loss": 3.336, "step": 49750 }, { "epoch": 0.4397816987230435, "grad_norm": 3.4062695503234863, "learning_rate": 9.817335408958056e-05, "loss": 3.2625, "step": 49800 }, { "epoch": 0.4402232466133277, "grad_norm": 1.6740872859954834, "learning_rate": 9.816963046020065e-05, "loss": 3.7033, "step": 49850 }, { "epoch": 0.44066479450361185, "grad_norm": 0.949266254901886, "learning_rate": 9.816590311012525e-05, "loss": 3.2875, "step": 49900 }, { "epoch": 0.44110634239389607, "grad_norm": 2.8111281394958496, "learning_rate": 9.816217203964228e-05, "loss": 3.2595, "step": 49950 }, { "epoch": 0.44154789028418023, "grad_norm": 1.117742657661438, "learning_rate": 9.815843724903993e-05, "loss": 3.2435, "step": 50000 }, { "epoch": 0.4419894381744644, "grad_norm": 2.1841189861297607, "learning_rate": 9.815469873860666e-05, "loss": 3.5207, "step": 50050 }, { "epoch": 0.44243098606474857, "grad_norm": 4.919894695281982, "learning_rate": 9.815095650863124e-05, "loss": 3.3703, "step": 50100 }, { "epoch": 0.4428725339550328, "grad_norm": 3.6214163303375244, "learning_rate": 9.814721055940275e-05, "loss": 3.5246, "step": 50150 }, { "epoch": 0.44331408184531695, "grad_norm": 3.0915398597717285, "learning_rate": 9.814346089121053e-05, "loss": 3.7959, "step": 50200 }, { "epoch": 0.4437556297356011, "grad_norm": 1.7853915691375732, "learning_rate": 9.813970750434419e-05, "loss": 3.4, "step": 50250 }, { "epoch": 0.4441971776258853, "grad_norm": 0.6693574786186218, "learning_rate": 
9.813595039909367e-05, "loss": 3.2674, "step": 50300 }, { "epoch": 0.4446387255161695, "grad_norm": 7.433414936065674, "learning_rate": 9.813218957574914e-05, "loss": 3.1686, "step": 50350 }, { "epoch": 0.4450802734064537, "grad_norm": 2.307650566101074, "learning_rate": 9.812842503460114e-05, "loss": 3.2572, "step": 50400 }, { "epoch": 0.44552182129673784, "grad_norm": 4.549615383148193, "learning_rate": 9.812465677594041e-05, "loss": 3.5429, "step": 50450 }, { "epoch": 0.445963369187022, "grad_norm": 1.0888493061065674, "learning_rate": 9.812088480005804e-05, "loss": 3.3837, "step": 50500 }, { "epoch": 0.44640491707730623, "grad_norm": 2.1504311561584473, "learning_rate": 9.811718465752564e-05, "loss": 3.7276, "step": 50550 }, { "epoch": 0.4468464649675904, "grad_norm": 3.0695831775665283, "learning_rate": 9.811340532240422e-05, "loss": 3.4433, "step": 50600 }, { "epoch": 0.44728801285787456, "grad_norm": 1.7197152376174927, "learning_rate": 9.810962227093022e-05, "loss": 3.5762, "step": 50650 }, { "epoch": 0.44772956074815873, "grad_norm": 1.3767517805099487, "learning_rate": 9.810583550339588e-05, "loss": 3.0935, "step": 50700 }, { "epoch": 0.44817110863844295, "grad_norm": 1.5721153020858765, "learning_rate": 9.810204502009364e-05, "loss": 3.4474, "step": 50750 }, { "epoch": 0.4486126565287271, "grad_norm": 4.190256118774414, "learning_rate": 9.809825082131633e-05, "loss": 3.1628, "step": 50800 }, { "epoch": 0.4490542044190113, "grad_norm": 2.406953811645508, "learning_rate": 9.809445290735702e-05, "loss": 3.2441, "step": 50850 }, { "epoch": 0.44949575230929545, "grad_norm": 4.159549236297607, "learning_rate": 9.809065127850903e-05, "loss": 3.0559, "step": 50900 }, { "epoch": 0.44993730019957967, "grad_norm": 1.8754595518112183, "learning_rate": 9.808684593506605e-05, "loss": 3.7703, "step": 50950 }, { "epoch": 0.45037884808986384, "grad_norm": 1.29695725440979, "learning_rate": 9.808303687732196e-05, "loss": 3.1498, "step": 51000 }, { "epoch": 
0.45037884808986384, "eval_asr_loss": 0.9004947789321158, "eval_loss": 3.0421018600463867, "eval_runtime": 20.6473, "eval_samples_per_second": 37.196, "eval_steps_per_second": 9.299, "eval_tts_loss": 5.865643483398774, "step": 51000 }, { "epoch": 0.450820395980148, "grad_norm": 2.4158124923706055, "learning_rate": 9.807922410557102e-05, "loss": 3.3356, "step": 51050 }, { "epoch": 0.45126194387043217, "grad_norm": 0.9025440812110901, "learning_rate": 9.807540762010772e-05, "loss": 3.336, "step": 51100 }, { "epoch": 0.4517034917607164, "grad_norm": 5.0518364906311035, "learning_rate": 9.807158742122684e-05, "loss": 3.2288, "step": 51150 }, { "epoch": 0.45214503965100056, "grad_norm": 1.4660353660583496, "learning_rate": 9.806776350922346e-05, "loss": 3.0611, "step": 51200 }, { "epoch": 0.4525865875412847, "grad_norm": 1.7514241933822632, "learning_rate": 9.806393588439297e-05, "loss": 3.304, "step": 51250 }, { "epoch": 0.4530281354315689, "grad_norm": 1.4853622913360596, "learning_rate": 9.806010454703099e-05, "loss": 3.436, "step": 51300 }, { "epoch": 0.4534696833218531, "grad_norm": 2.174837112426758, "learning_rate": 9.805626949743347e-05, "loss": 3.6087, "step": 51350 }, { "epoch": 0.4539112312121373, "grad_norm": 1.5965477228164673, "learning_rate": 9.805243073589665e-05, "loss": 3.4667, "step": 51400 }, { "epoch": 0.45435277910242144, "grad_norm": 2.395080804824829, "learning_rate": 9.804858826271703e-05, "loss": 3.5994, "step": 51450 }, { "epoch": 0.4547943269927056, "grad_norm": 4.734639644622803, "learning_rate": 9.804474207819139e-05, "loss": 3.2731, "step": 51500 }, { "epoch": 0.45523587488298983, "grad_norm": 1.91262948513031, "learning_rate": 9.804089218261684e-05, "loss": 3.4359, "step": 51550 }, { "epoch": 0.455677422773274, "grad_norm": 4.3235321044921875, "learning_rate": 9.803703857629075e-05, "loss": 3.6903, "step": 51600 }, { "epoch": 0.45611897066355817, "grad_norm": 2.399240016937256, "learning_rate": 9.803318125951075e-05, "loss": 3.1958, 
"step": 51650 }, { "epoch": 0.45656051855384233, "grad_norm": 3.3152384757995605, "learning_rate": 9.802932023257483e-05, "loss": 3.4387, "step": 51700 }, { "epoch": 0.45700206644412655, "grad_norm": 3.167579412460327, "learning_rate": 9.802545549578119e-05, "loss": 3.6247, "step": 51750 }, { "epoch": 0.4574436143344107, "grad_norm": 1.6194385290145874, "learning_rate": 9.802158704942837e-05, "loss": 3.3378, "step": 51800 }, { "epoch": 0.4578851622246949, "grad_norm": 1.213178277015686, "learning_rate": 9.801771489381515e-05, "loss": 3.0608, "step": 51850 }, { "epoch": 0.45832671011497905, "grad_norm": 4.134045600891113, "learning_rate": 9.8013916582878e-05, "loss": 3.1588, "step": 51900 }, { "epoch": 0.4587682580052633, "grad_norm": 3.060990571975708, "learning_rate": 9.801003708381187e-05, "loss": 3.3848, "step": 51950 }, { "epoch": 0.45920980589554744, "grad_norm": 2.216519594192505, "learning_rate": 9.800615387637748e-05, "loss": 3.2701, "step": 52000 }, { "epoch": 0.4596513537858316, "grad_norm": 2.748791456222534, "learning_rate": 9.80022669608748e-05, "loss": 3.6429, "step": 52050 }, { "epoch": 0.4600929016761158, "grad_norm": 2.1832919120788574, "learning_rate": 9.799837633760403e-05, "loss": 3.4686, "step": 52100 }, { "epoch": 0.4605344495664, "grad_norm": 10.31329345703125, "learning_rate": 9.799448200686569e-05, "loss": 3.4104, "step": 52150 }, { "epoch": 0.46097599745668416, "grad_norm": 3.921966791152954, "learning_rate": 9.799058396896062e-05, "loss": 3.1034, "step": 52200 }, { "epoch": 0.4614175453469683, "grad_norm": 1.399453043937683, "learning_rate": 9.798668222418988e-05, "loss": 2.6928, "step": 52250 }, { "epoch": 0.4618590932372525, "grad_norm": 2.0908732414245605, "learning_rate": 9.798277677285483e-05, "loss": 3.2176, "step": 52300 }, { "epoch": 0.4623006411275367, "grad_norm": 1.981628656387329, "learning_rate": 9.797886761525719e-05, "loss": 3.1046, "step": 52350 }, { "epoch": 0.4627421890178209, "grad_norm": 2.1923952102661133, 
"learning_rate": 9.797495475169886e-05, "loss": 2.9833, "step": 52400 }, { "epoch": 0.46318373690810505, "grad_norm": 4.177052021026611, "learning_rate": 9.79710381824821e-05, "loss": 2.9391, "step": 52450 }, { "epoch": 0.4636252847983892, "grad_norm": 1.3210045099258423, "learning_rate": 9.796711790790941e-05, "loss": 3.5919, "step": 52500 }, { "epoch": 0.46406683268867344, "grad_norm": 1.5212137699127197, "learning_rate": 9.796319392828361e-05, "loss": 3.4421, "step": 52550 }, { "epoch": 0.4645083805789576, "grad_norm": 9.704986572265625, "learning_rate": 9.795926624390781e-05, "loss": 3.2297, "step": 52600 }, { "epoch": 0.46494992846924177, "grad_norm": 0.9471487402915955, "learning_rate": 9.795533485508537e-05, "loss": 3.6265, "step": 52650 }, { "epoch": 0.46539147635952594, "grad_norm": 2.34822154045105, "learning_rate": 9.795139976211996e-05, "loss": 3.2869, "step": 52700 }, { "epoch": 0.46583302424981016, "grad_norm": 1.4003323316574097, "learning_rate": 9.794746096531554e-05, "loss": 3.215, "step": 52750 }, { "epoch": 0.4662745721400943, "grad_norm": 0.5006351470947266, "learning_rate": 9.794351846497634e-05, "loss": 3.3245, "step": 52800 }, { "epoch": 0.4667161200303785, "grad_norm": 1.4579681158065796, "learning_rate": 9.793957226140688e-05, "loss": 3.0416, "step": 52850 }, { "epoch": 0.46715766792066266, "grad_norm": 2.5864365100860596, "learning_rate": 9.793562235491198e-05, "loss": 3.2932, "step": 52900 }, { "epoch": 0.4675992158109469, "grad_norm": 1.7026253938674927, "learning_rate": 9.793166874579673e-05, "loss": 3.1959, "step": 52950 }, { "epoch": 0.46804076370123104, "grad_norm": 5.3310394287109375, "learning_rate": 9.792771143436654e-05, "loss": 3.2207, "step": 53000 }, { "epoch": 0.4684823115915152, "grad_norm": 1.704520583152771, "learning_rate": 9.792375042092704e-05, "loss": 3.1596, "step": 53050 }, { "epoch": 0.4689238594817994, "grad_norm": 2.3095810413360596, "learning_rate": 9.79197857057842e-05, "loss": 3.48, "step": 53100 }, { "epoch": 
0.4693654073720836, "grad_norm": 3.7878098487854004, "learning_rate": 9.791581728924427e-05, "loss": 3.4366, "step": 53150 }, { "epoch": 0.46980695526236776, "grad_norm": 1.6754798889160156, "learning_rate": 9.791184517161377e-05, "loss": 3.1798, "step": 53200 }, { "epoch": 0.47024850315265193, "grad_norm": 2.6600844860076904, "learning_rate": 9.79078693531995e-05, "loss": 3.378, "step": 53250 }, { "epoch": 0.4706900510429361, "grad_norm": 1.902320146560669, "learning_rate": 9.790388983430859e-05, "loss": 2.8402, "step": 53300 }, { "epoch": 0.4711315989332203, "grad_norm": 1.9587918519973755, "learning_rate": 9.789990661524839e-05, "loss": 3.6821, "step": 53350 }, { "epoch": 0.4715731468235045, "grad_norm": 0.7448610067367554, "learning_rate": 9.789591969632658e-05, "loss": 3.3176, "step": 53400 }, { "epoch": 0.47201469471378865, "grad_norm": 4.580758094787598, "learning_rate": 9.789192907785111e-05, "loss": 3.2808, "step": 53450 }, { "epoch": 0.4724562426040728, "grad_norm": 2.524980068206787, "learning_rate": 9.788793476013025e-05, "loss": 3.3751, "step": 53500 }, { "epoch": 0.47289779049435704, "grad_norm": 3.0498645305633545, "learning_rate": 9.788393674347249e-05, "loss": 3.1801, "step": 53550 }, { "epoch": 0.4733393383846412, "grad_norm": 2.230107545852661, "learning_rate": 9.787993502818669e-05, "loss": 3.7616, "step": 53600 }, { "epoch": 0.4737808862749254, "grad_norm": 0.8255630731582642, "learning_rate": 9.78759296145819e-05, "loss": 3.0498, "step": 53650 }, { "epoch": 0.47422243416520954, "grad_norm": 4.1301727294921875, "learning_rate": 9.787192050296752e-05, "loss": 3.281, "step": 53700 }, { "epoch": 0.47466398205549376, "grad_norm": 3.9466300010681152, "learning_rate": 9.786790769365322e-05, "loss": 3.1479, "step": 53750 }, { "epoch": 0.4751055299457779, "grad_norm": 1.8435554504394531, "learning_rate": 9.786389118694897e-05, "loss": 3.4114, "step": 53800 }, { "epoch": 0.4755470778360621, "grad_norm": 0.7896223664283752, "learning_rate": 
9.785987098316499e-05, "loss": 3.4087, "step": 53850 }, { "epoch": 0.47598862572634626, "grad_norm": 9.002167701721191, "learning_rate": 9.785584708261182e-05, "loss": 3.2257, "step": 53900 }, { "epoch": 0.4764301736166305, "grad_norm": 0.6309633851051331, "learning_rate": 9.785181948560028e-05, "loss": 3.1985, "step": 53950 }, { "epoch": 0.47687172150691465, "grad_norm": 3.459434747695923, "learning_rate": 9.784778819244144e-05, "loss": 3.3152, "step": 54000 }, { "epoch": 0.47687172150691465, "eval_asr_loss": 0.8963724033623304, "eval_loss": 3.0343711376190186, "eval_runtime": 20.7535, "eval_samples_per_second": 37.006, "eval_steps_per_second": 9.251, "eval_tts_loss": 5.906121494862091, "step": 54000 }, { "epoch": 0.4773132693971988, "grad_norm": 2.1301255226135254, "learning_rate": 9.78437532034467e-05, "loss": 3.7664, "step": 54050 }, { "epoch": 0.477754817287483, "grad_norm": 2.864131450653076, "learning_rate": 9.783971451892773e-05, "loss": 3.0815, "step": 54100 }, { "epoch": 0.4781963651777672, "grad_norm": 2.723444938659668, "learning_rate": 9.783567213919649e-05, "loss": 3.2961, "step": 54150 }, { "epoch": 0.47863791306805137, "grad_norm": 2.7087883949279785, "learning_rate": 9.783162606456521e-05, "loss": 3.4694, "step": 54200 }, { "epoch": 0.47907946095833553, "grad_norm": 2.582592248916626, "learning_rate": 9.782757629534642e-05, "loss": 3.4529, "step": 54250 }, { "epoch": 0.4795210088486197, "grad_norm": 1.1580854654312134, "learning_rate": 9.782352283185293e-05, "loss": 3.357, "step": 54300 }, { "epoch": 0.4799625567389039, "grad_norm": 3.442847490310669, "learning_rate": 9.781946567439781e-05, "loss": 3.2244, "step": 54350 }, { "epoch": 0.4804041046291881, "grad_norm": 4.473285675048828, "learning_rate": 9.781540482329447e-05, "loss": 3.2563, "step": 54400 }, { "epoch": 0.48084565251947226, "grad_norm": 2.0757851600646973, "learning_rate": 9.78113402788566e-05, "loss": 3.3494, "step": 54450 }, { "epoch": 0.4812872004097564, "grad_norm": 
1.0765964984893799, "learning_rate": 9.78072720413981e-05, "loss": 2.973, "step": 54500 }, { "epoch": 0.48172874830004064, "grad_norm": 2.187049150466919, "learning_rate": 9.780320011123322e-05, "loss": 3.5087, "step": 54550 }, { "epoch": 0.4821702961903248, "grad_norm": 2.3897063732147217, "learning_rate": 9.779912448867649e-05, "loss": 3.3432, "step": 54600 }, { "epoch": 0.482611844080609, "grad_norm": 1.4350836277008057, "learning_rate": 9.779504517404274e-05, "loss": 3.547, "step": 54650 }, { "epoch": 0.48305339197089314, "grad_norm": 1.5999610424041748, "learning_rate": 9.779096216764703e-05, "loss": 3.1709, "step": 54700 }, { "epoch": 0.48349493986117736, "grad_norm": 4.64166784286499, "learning_rate": 9.778687546980474e-05, "loss": 3.6566, "step": 54750 }, { "epoch": 0.48393648775146153, "grad_norm": 2.923461675643921, "learning_rate": 9.778278508083154e-05, "loss": 3.4523, "step": 54800 }, { "epoch": 0.4843780356417457, "grad_norm": 1.1621273756027222, "learning_rate": 9.77786910010434e-05, "loss": 3.4142, "step": 54850 }, { "epoch": 0.48481958353202986, "grad_norm": 2.0562987327575684, "learning_rate": 9.77745932307565e-05, "loss": 3.2347, "step": 54900 }, { "epoch": 0.4852611314223141, "grad_norm": 2.8436999320983887, "learning_rate": 9.777049177028742e-05, "loss": 3.4778, "step": 54950 }, { "epoch": 0.48570267931259825, "grad_norm": 3.21048641204834, "learning_rate": 9.776638661995292e-05, "loss": 3.4173, "step": 55000 }, { "epoch": 0.4861442272028824, "grad_norm": 2.347586154937744, "learning_rate": 9.77622777800701e-05, "loss": 3.6781, "step": 55050 }, { "epoch": 0.4865857750931666, "grad_norm": 1.2790504693984985, "learning_rate": 9.775816525095633e-05, "loss": 3.5449, "step": 55100 }, { "epoch": 0.4870273229834508, "grad_norm": 2.234579563140869, "learning_rate": 9.775404903292929e-05, "loss": 3.403, "step": 55150 }, { "epoch": 0.48746887087373497, "grad_norm": 3.1236841678619385, "learning_rate": 9.774992912630689e-05, "loss": 3.5316, "step": 55200 
}, { "epoch": 0.48791041876401914, "grad_norm": 2.067030906677246, "learning_rate": 9.774580553140736e-05, "loss": 3.3101, "step": 55250 }, { "epoch": 0.4883519666543033, "grad_norm": 2.3011932373046875, "learning_rate": 9.774167824854925e-05, "loss": 3.9037, "step": 55300 }, { "epoch": 0.4887935145445875, "grad_norm": 0.8518275618553162, "learning_rate": 9.773754727805132e-05, "loss": 3.7854, "step": 55350 }, { "epoch": 0.4892350624348717, "grad_norm": 2.5740087032318115, "learning_rate": 9.773341262023265e-05, "loss": 3.6587, "step": 55400 }, { "epoch": 0.48967661032515586, "grad_norm": 2.541947364807129, "learning_rate": 9.772927427541266e-05, "loss": 3.3046, "step": 55450 }, { "epoch": 0.49011815821544, "grad_norm": 10.81189250946045, "learning_rate": 9.772513224391093e-05, "loss": 3.5087, "step": 55500 }, { "epoch": 0.49055970610572425, "grad_norm": 2.659097909927368, "learning_rate": 9.772098652604745e-05, "loss": 3.7244, "step": 55550 }, { "epoch": 0.4910012539960084, "grad_norm": 1.6622788906097412, "learning_rate": 9.771683712214241e-05, "loss": 3.9154, "step": 55600 }, { "epoch": 0.4914428018862926, "grad_norm": 4.244743347167969, "learning_rate": 9.771268403251634e-05, "loss": 3.1095, "step": 55650 }, { "epoch": 0.49188434977657675, "grad_norm": 2.233381986618042, "learning_rate": 9.770852725749002e-05, "loss": 2.7214, "step": 55700 }, { "epoch": 0.49232589766686097, "grad_norm": 3.992842674255371, "learning_rate": 9.770436679738452e-05, "loss": 2.8508, "step": 55750 }, { "epoch": 0.49276744555714513, "grad_norm": 2.8735084533691406, "learning_rate": 9.770020265252122e-05, "loss": 3.3086, "step": 55800 }, { "epoch": 0.4932089934474293, "grad_norm": 0.7846609950065613, "learning_rate": 9.769603482322173e-05, "loss": 3.0594, "step": 55850 }, { "epoch": 0.49365054133771347, "grad_norm": 3.95578670501709, "learning_rate": 9.769186330980802e-05, "loss": 3.2712, "step": 55900 }, { "epoch": 0.4940920892279977, "grad_norm": 1.6080371141433716, "learning_rate": 
9.768768811260229e-05, "loss": 3.3038, "step": 55950 }, { "epoch": 0.49453363711828185, "grad_norm": 1.84148108959198, "learning_rate": 9.7683509231927e-05, "loss": 3.2961, "step": 56000 }, { "epoch": 0.494975185008566, "grad_norm": 3.487783670425415, "learning_rate": 9.7679326668105e-05, "loss": 3.0265, "step": 56050 }, { "epoch": 0.4954167328988502, "grad_norm": 1.8693515062332153, "learning_rate": 9.767514042145931e-05, "loss": 3.3785, "step": 56100 }, { "epoch": 0.4958582807891344, "grad_norm": 0.690427839756012, "learning_rate": 9.767095049231329e-05, "loss": 2.9811, "step": 56150 }, { "epoch": 0.4962998286794186, "grad_norm": 3.2560925483703613, "learning_rate": 9.766675688099059e-05, "loss": 3.3119, "step": 56200 }, { "epoch": 0.49674137656970274, "grad_norm": 2.757688522338867, "learning_rate": 9.766255958781512e-05, "loss": 3.3075, "step": 56250 }, { "epoch": 0.4971829244599869, "grad_norm": 1.4115214347839355, "learning_rate": 9.765835861311108e-05, "loss": 3.3612, "step": 56300 }, { "epoch": 0.49762447235027113, "grad_norm": 3.8688366413116455, "learning_rate": 9.765415395720298e-05, "loss": 3.1324, "step": 56350 }, { "epoch": 0.4980660202405553, "grad_norm": 3.989609479904175, "learning_rate": 9.764994562041559e-05, "loss": 3.1485, "step": 56400 }, { "epoch": 0.49850756813083946, "grad_norm": 2.1451547145843506, "learning_rate": 9.764573360307394e-05, "loss": 3.2281, "step": 56450 }, { "epoch": 0.49894911602112363, "grad_norm": 1.555163025856018, "learning_rate": 9.76415179055034e-05, "loss": 3.0442, "step": 56500 }, { "epoch": 0.49939066391140785, "grad_norm": 2.0306787490844727, "learning_rate": 9.76372985280296e-05, "loss": 3.3319, "step": 56550 }, { "epoch": 0.499832211801692, "grad_norm": 3.7980337142944336, "learning_rate": 9.763307547097844e-05, "loss": 3.6434, "step": 56600 }, { "epoch": 0.5002737596919762, "grad_norm": 4.4818525314331055, "learning_rate": 9.762884873467611e-05, "loss": 3.2294, "step": 56650 }, { "epoch": 0.5007153075822603, 
"grad_norm": 1.937999963760376, "learning_rate": 9.76246183194491e-05, "loss": 3.5661, "step": 56700 }, { "epoch": 0.5011568554725445, "grad_norm": 1.306862711906433, "learning_rate": 9.762038422562417e-05, "loss": 2.9257, "step": 56750 }, { "epoch": 0.5015984033628287, "grad_norm": 1.1766592264175415, "learning_rate": 9.761614645352835e-05, "loss": 3.3017, "step": 56800 }, { "epoch": 0.502039951253113, "grad_norm": 3.1086044311523438, "learning_rate": 9.7611905003489e-05, "loss": 3.5308, "step": 56850 }, { "epoch": 0.5024814991433971, "grad_norm": 2.8829550743103027, "learning_rate": 9.760765987583373e-05, "loss": 3.1106, "step": 56900 }, { "epoch": 0.5029230470336813, "grad_norm": 2.7359766960144043, "learning_rate": 9.760341107089044e-05, "loss": 3.509, "step": 56950 }, { "epoch": 0.5033645949239655, "grad_norm": 7.022033214569092, "learning_rate": 9.759915858898732e-05, "loss": 3.1945, "step": 57000 }, { "epoch": 0.5033645949239655, "eval_asr_loss": 0.9032422534916057, "eval_loss": 3.0304603576660156, "eval_runtime": 51.0735, "eval_samples_per_second": 15.037, "eval_steps_per_second": 3.759, "eval_tts_loss": 5.9164860032310695, "step": 57000 }, { "epoch": 0.5038061428142496, "grad_norm": 1.487955093383789, "learning_rate": 9.75949024304528e-05, "loss": 3.1603, "step": 57050 }, { "epoch": 0.5042476907045338, "grad_norm": 2.423590660095215, "learning_rate": 9.759064259561568e-05, "loss": 3.6678, "step": 57100 }, { "epoch": 0.504689238594818, "grad_norm": 1.796743631362915, "learning_rate": 9.758637908480497e-05, "loss": 3.4576, "step": 57150 }, { "epoch": 0.5051307864851021, "grad_norm": 6.50948429107666, "learning_rate": 9.758211189834999e-05, "loss": 3.2279, "step": 57200 }, { "epoch": 0.5055723343753864, "grad_norm": 1.0165084600448608, "learning_rate": 9.757784103658036e-05, "loss": 3.3161, "step": 57250 }, { "epoch": 0.5060138822656706, "grad_norm": 3.4719436168670654, "learning_rate": 9.757356649982595e-05, "loss": 3.6583, "step": 57300 }, { "epoch": 
0.5064554301559547, "grad_norm": 2.7984747886657715, "learning_rate": 9.756928828841694e-05, "loss": 3.507, "step": 57350 }, { "epoch": 0.5068969780462389, "grad_norm": 4.911170959472656, "learning_rate": 9.75650064026838e-05, "loss": 3.0097, "step": 57400 }, { "epoch": 0.5073385259365231, "grad_norm": 3.57440185546875, "learning_rate": 9.756072084295725e-05, "loss": 2.9845, "step": 57450 }, { "epoch": 0.5077800738268072, "grad_norm": 1.4227149486541748, "learning_rate": 9.755651743023584e-05, "loss": 3.6389, "step": 57500 }, { "epoch": 0.5082216217170914, "grad_norm": 2.1636316776275635, "learning_rate": 9.755222459697919e-05, "loss": 2.9495, "step": 57550 }, { "epoch": 0.5086631696073756, "grad_norm": 5.281383991241455, "learning_rate": 9.754792809071644e-05, "loss": 3.388, "step": 57600 }, { "epoch": 0.5091047174976598, "grad_norm": 1.8442537784576416, "learning_rate": 9.754362791177944e-05, "loss": 3.7662, "step": 57650 }, { "epoch": 0.509546265387944, "grad_norm": 2.220319986343384, "learning_rate": 9.753932406050032e-05, "loss": 3.1771, "step": 57700 }, { "epoch": 0.5099878132782282, "grad_norm": 2.379897356033325, "learning_rate": 9.753501653721155e-05, "loss": 3.2315, "step": 57750 }, { "epoch": 0.5104293611685123, "grad_norm": 1.3445017337799072, "learning_rate": 9.753070534224584e-05, "loss": 3.4884, "step": 57800 }, { "epoch": 0.5108709090587965, "grad_norm": 1.7846251726150513, "learning_rate": 9.752639047593619e-05, "loss": 3.5118, "step": 57850 }, { "epoch": 0.5113124569490807, "grad_norm": 0.7859761714935303, "learning_rate": 9.752207193861589e-05, "loss": 3.4603, "step": 57900 }, { "epoch": 0.5117540048393648, "grad_norm": 1.91860830783844, "learning_rate": 9.751774973061851e-05, "loss": 3.354, "step": 57950 }, { "epoch": 0.512195552729649, "grad_norm": 2.4331154823303223, "learning_rate": 9.751342385227791e-05, "loss": 3.1493, "step": 58000 }, { "epoch": 0.5126371006199333, "grad_norm": 2.020382881164551, "learning_rate": 9.750909430392821e-05, 
"loss": 3.4823, "step": 58050 }, { "epoch": 0.5130786485102174, "grad_norm": 1.6145989894866943, "learning_rate": 9.750476108590383e-05, "loss": 3.2045, "step": 58100 }, { "epoch": 0.5135201964005016, "grad_norm": 4.1392364501953125, "learning_rate": 9.750042419853949e-05, "loss": 3.2311, "step": 58150 }, { "epoch": 0.5139617442907858, "grad_norm": 2.6319990158081055, "learning_rate": 9.749608364217018e-05, "loss": 3.4167, "step": 58200 }, { "epoch": 0.51440329218107, "grad_norm": 0.8864327073097229, "learning_rate": 9.749173941713114e-05, "loss": 3.4851, "step": 58250 }, { "epoch": 0.5148448400713541, "grad_norm": 1.8275792598724365, "learning_rate": 9.748739152375799e-05, "loss": 3.429, "step": 58300 }, { "epoch": 0.5152863879616383, "grad_norm": 1.4619648456573486, "learning_rate": 9.74830399623865e-05, "loss": 3.1952, "step": 58350 }, { "epoch": 0.5157279358519224, "grad_norm": 2.958252429962158, "learning_rate": 9.747868473335283e-05, "loss": 3.3281, "step": 58400 }, { "epoch": 0.5161694837422067, "grad_norm": 3.7064812183380127, "learning_rate": 9.747432583699334e-05, "loss": 3.1441, "step": 58450 }, { "epoch": 0.5166110316324909, "grad_norm": 2.135683298110962, "learning_rate": 9.746996327364478e-05, "loss": 3.3074, "step": 58500 }, { "epoch": 0.5170525795227751, "grad_norm": 4.96755838394165, "learning_rate": 9.746559704364409e-05, "loss": 3.1264, "step": 58550 }, { "epoch": 0.5174941274130592, "grad_norm": 1.5454546213150024, "learning_rate": 9.74612271473285e-05, "loss": 3.3129, "step": 58600 }, { "epoch": 0.5179356753033434, "grad_norm": 2.1017255783081055, "learning_rate": 9.74568535850356e-05, "loss": 3.2598, "step": 58650 }, { "epoch": 0.5183772231936276, "grad_norm": 1.2823448181152344, "learning_rate": 9.745247635710318e-05, "loss": 3.6969, "step": 58700 }, { "epoch": 0.5188187710839117, "grad_norm": 2.371886968612671, "learning_rate": 9.744809546386933e-05, "loss": 3.2902, "step": 58750 }, { "epoch": 0.5192603189741959, "grad_norm": 
1.7832865715026855, "learning_rate": 9.744371090567246e-05, "loss": 2.8521, "step": 58800 }, { "epoch": 0.5197018668644802, "grad_norm": 1.12856125831604, "learning_rate": 9.743932268285124e-05, "loss": 3.7715, "step": 58850 }, { "epoch": 0.5201434147547643, "grad_norm": 1.2334407567977905, "learning_rate": 9.743493079574461e-05, "loss": 3.2311, "step": 58900 }, { "epoch": 0.5205849626450485, "grad_norm": 2.203479528427124, "learning_rate": 9.743053524469182e-05, "loss": 3.438, "step": 58950 }, { "epoch": 0.5210265105353327, "grad_norm": 0.8005960583686829, "learning_rate": 9.742613603003238e-05, "loss": 3.6582, "step": 59000 }, { "epoch": 0.5214680584256168, "grad_norm": 2.671816110610962, "learning_rate": 9.742173315210608e-05, "loss": 2.9465, "step": 59050 }, { "epoch": 0.521909606315901, "grad_norm": 0.9956416487693787, "learning_rate": 9.741732661125304e-05, "loss": 3.3792, "step": 59100 }, { "epoch": 0.5223511542061852, "grad_norm": 1.9408859014511108, "learning_rate": 9.741291640781359e-05, "loss": 3.2726, "step": 59150 }, { "epoch": 0.5227927020964693, "grad_norm": 1.1133266687393188, "learning_rate": 9.740850254212841e-05, "loss": 3.3387, "step": 59200 }, { "epoch": 0.5232342499867536, "grad_norm": 0.527526319026947, "learning_rate": 9.740408501453841e-05, "loss": 3.0193, "step": 59250 }, { "epoch": 0.5236757978770378, "grad_norm": 1.995932936668396, "learning_rate": 9.7399752285049e-05, "loss": 3.5348, "step": 59300 }, { "epoch": 0.5241173457673219, "grad_norm": 1.4613289833068848, "learning_rate": 9.739532750789442e-05, "loss": 3.0866, "step": 59350 }, { "epoch": 0.5245588936576061, "grad_norm": 2.9688708782196045, "learning_rate": 9.739089906985268e-05, "loss": 3.5847, "step": 59400 }, { "epoch": 0.5250004415478903, "grad_norm": 1.3395789861679077, "learning_rate": 9.738646697126586e-05, "loss": 3.353, "step": 59450 }, { "epoch": 0.5254419894381744, "grad_norm": 4.93052864074707, "learning_rate": 9.738203121247627e-05, "loss": 3.626, "step": 59500 }, { 
"epoch": 0.5258835373284586, "grad_norm": 3.3152246475219727, "learning_rate": 9.737759179382658e-05, "loss": 2.8786, "step": 59550 }, { "epoch": 0.5263250852187428, "grad_norm": 1.5898215770721436, "learning_rate": 9.737314871565965e-05, "loss": 3.7041, "step": 59600 }, { "epoch": 0.526766633109027, "grad_norm": 2.1526150703430176, "learning_rate": 9.736870197831871e-05, "loss": 3.5867, "step": 59650 }, { "epoch": 0.5272081809993112, "grad_norm": 2.351823568344116, "learning_rate": 9.736425158214723e-05, "loss": 3.4919, "step": 59700 }, { "epoch": 0.5276497288895954, "grad_norm": 2.4668421745300293, "learning_rate": 9.735979752748894e-05, "loss": 3.3577, "step": 59750 }, { "epoch": 0.5280912767798795, "grad_norm": 2.4981229305267334, "learning_rate": 9.73553398146879e-05, "loss": 3.368, "step": 59800 }, { "epoch": 0.5285328246701637, "grad_norm": 1.9264018535614014, "learning_rate": 9.735087844408839e-05, "loss": 3.7379, "step": 59850 }, { "epoch": 0.5289743725604479, "grad_norm": 4.922735691070557, "learning_rate": 9.734641341603507e-05, "loss": 3.1943, "step": 59900 }, { "epoch": 0.529415920450732, "grad_norm": 5.6230645179748535, "learning_rate": 9.73419447308728e-05, "loss": 3.3315, "step": 59950 }, { "epoch": 0.5298574683410162, "grad_norm": 1.985254168510437, "learning_rate": 9.733747238894673e-05, "loss": 3.8071, "step": 60000 }, { "epoch": 0.5298574683410162, "eval_asr_loss": 0.8992706317769239, "eval_loss": 3.0059359073638916, "eval_runtime": 20.6646, "eval_samples_per_second": 37.165, "eval_steps_per_second": 9.291, "eval_tts_loss": 5.9863666861098865, "step": 60000 }, { "epoch": 0.5302990162313005, "grad_norm": 1.029417872428894, "learning_rate": 9.733299639060233e-05, "loss": 3.503, "step": 60050 }, { "epoch": 0.5307405641215847, "grad_norm": 6.666355609893799, "learning_rate": 9.732851673618535e-05, "loss": 3.2843, "step": 60100 }, { "epoch": 0.5311821120118688, "grad_norm": 1.5539653301239014, "learning_rate": 9.732403342604177e-05, "loss": 3.374, 
"step": 60150 }, { "epoch": 0.531623659902153, "grad_norm": 1.662533164024353, "learning_rate": 9.731954646051792e-05, "loss": 3.1763, "step": 60200 }, { "epoch": 0.5320652077924372, "grad_norm": 1.5471640825271606, "learning_rate": 9.731505583996035e-05, "loss": 3.3524, "step": 60250 }, { "epoch": 0.5325067556827213, "grad_norm": 3.6004583835601807, "learning_rate": 9.731056156471594e-05, "loss": 3.4967, "step": 60300 }, { "epoch": 0.5329483035730055, "grad_norm": 5.77100944519043, "learning_rate": 9.730606363513184e-05, "loss": 3.3075, "step": 60350 }, { "epoch": 0.5333898514632897, "grad_norm": 2.9445252418518066, "learning_rate": 9.730156205155545e-05, "loss": 3.681, "step": 60400 }, { "epoch": 0.5338313993535739, "grad_norm": 3.9473483562469482, "learning_rate": 9.729705681433451e-05, "loss": 2.9724, "step": 60450 }, { "epoch": 0.5342729472438581, "grad_norm": 2.4145290851593018, "learning_rate": 9.729254792381698e-05, "loss": 3.1867, "step": 60500 }, { "epoch": 0.5347144951341423, "grad_norm": 2.9569525718688965, "learning_rate": 9.728803538035119e-05, "loss": 3.6975, "step": 60550 }, { "epoch": 0.5351560430244264, "grad_norm": 3.94183349609375, "learning_rate": 9.728351918428562e-05, "loss": 3.6706, "step": 60600 }, { "epoch": 0.5355975909147106, "grad_norm": 1.8953005075454712, "learning_rate": 9.727899933596917e-05, "loss": 3.3651, "step": 60650 }, { "epoch": 0.5360391388049948, "grad_norm": 1.777334213256836, "learning_rate": 9.727447583575092e-05, "loss": 3.7644, "step": 60700 }, { "epoch": 0.5364806866952789, "grad_norm": 3.474303960800171, "learning_rate": 9.72699486839803e-05, "loss": 3.2085, "step": 60750 }, { "epoch": 0.5369222345855631, "grad_norm": 2.88875412940979, "learning_rate": 9.726541788100696e-05, "loss": 3.8591, "step": 60800 }, { "epoch": 0.5373637824758474, "grad_norm": 3.021052598953247, "learning_rate": 9.726088342718091e-05, "loss": 3.3407, "step": 60850 }, { "epoch": 0.5378053303661315, "grad_norm": 1.9368441104888916, 
"learning_rate": 9.725634532285238e-05, "loss": 3.4342, "step": 60900 }, { "epoch": 0.5382468782564157, "grad_norm": 1.3616617918014526, "learning_rate": 9.725180356837187e-05, "loss": 3.1015, "step": 60950 }, { "epoch": 0.5386884261466999, "grad_norm": 1.2605106830596924, "learning_rate": 9.724725816409024e-05, "loss": 3.4434, "step": 61000 }, { "epoch": 0.539129974036984, "grad_norm": 2.5226216316223145, "learning_rate": 9.724270911035856e-05, "loss": 3.2771, "step": 61050 }, { "epoch": 0.5395715219272682, "grad_norm": 1.5489667654037476, "learning_rate": 9.723815640752818e-05, "loss": 2.9834, "step": 61100 }, { "epoch": 0.5400130698175524, "grad_norm": 0.3823397159576416, "learning_rate": 9.723360005595082e-05, "loss": 3.4476, "step": 61150 }, { "epoch": 0.5404546177078365, "grad_norm": 1.4170905351638794, "learning_rate": 9.722904005597838e-05, "loss": 2.9736, "step": 61200 }, { "epoch": 0.5408961655981208, "grad_norm": 3.013650417327881, "learning_rate": 9.722447640796306e-05, "loss": 3.4688, "step": 61250 }, { "epoch": 0.541337713488405, "grad_norm": 2.0270910263061523, "learning_rate": 9.721990911225742e-05, "loss": 3.4488, "step": 61300 }, { "epoch": 0.5417792613786891, "grad_norm": 2.3157079219818115, "learning_rate": 9.72153381692142e-05, "loss": 3.4686, "step": 61350 }, { "epoch": 0.5422208092689733, "grad_norm": 2.65665864944458, "learning_rate": 9.721076357918648e-05, "loss": 3.0758, "step": 61400 }, { "epoch": 0.5426623571592575, "grad_norm": 4.086440086364746, "learning_rate": 9.720618534252761e-05, "loss": 3.0561, "step": 61450 }, { "epoch": 0.5431039050495416, "grad_norm": 1.6047959327697754, "learning_rate": 9.720160345959122e-05, "loss": 3.2575, "step": 61500 }, { "epoch": 0.5435454529398258, "grad_norm": 1.371722936630249, "learning_rate": 9.719701793073121e-05, "loss": 3.197, "step": 61550 }, { "epoch": 0.54398700083011, "grad_norm": 1.1012533903121948, "learning_rate": 9.71924287563018e-05, "loss": 3.1246, "step": 61600 }, { "epoch": 
0.5444285487203943, "grad_norm": 2.610647201538086, "learning_rate": 9.718783593665745e-05, "loss": 3.4834, "step": 61650 }, { "epoch": 0.5448700966106784, "grad_norm": 3.1460602283477783, "learning_rate": 9.71832394721529e-05, "loss": 3.5381, "step": 61700 }, { "epoch": 0.5453116445009626, "grad_norm": 2.426410675048828, "learning_rate": 9.717863936314322e-05, "loss": 3.0234, "step": 61750 }, { "epoch": 0.5457531923912468, "grad_norm": 1.7792102098464966, "learning_rate": 9.71740356099837e-05, "loss": 3.3962, "step": 61800 }, { "epoch": 0.5461947402815309, "grad_norm": 0.535715639591217, "learning_rate": 9.716942821302995e-05, "loss": 3.2452, "step": 61850 }, { "epoch": 0.5466362881718151, "grad_norm": 3.1257872581481934, "learning_rate": 9.716481717263787e-05, "loss": 3.2172, "step": 61900 }, { "epoch": 0.5470778360620993, "grad_norm": 2.848803997039795, "learning_rate": 9.71602024891636e-05, "loss": 3.5478, "step": 61950 }, { "epoch": 0.5475193839523834, "grad_norm": 2.5586445331573486, "learning_rate": 9.71555841629636e-05, "loss": 3.5599, "step": 62000 }, { "epoch": 0.5479609318426677, "grad_norm": 5.202526569366455, "learning_rate": 9.715096219439458e-05, "loss": 3.3383, "step": 62050 }, { "epoch": 0.5484024797329519, "grad_norm": 7.311855316162109, "learning_rate": 9.714633658381358e-05, "loss": 3.5755, "step": 62100 }, { "epoch": 0.548844027623236, "grad_norm": 2.228634834289551, "learning_rate": 9.714170733157784e-05, "loss": 3.1598, "step": 62150 }, { "epoch": 0.5492855755135202, "grad_norm": 0.7159774899482727, "learning_rate": 9.713707443804499e-05, "loss": 3.3646, "step": 62200 }, { "epoch": 0.5497271234038044, "grad_norm": 1.7473576068878174, "learning_rate": 9.713243790357282e-05, "loss": 3.153, "step": 62250 }, { "epoch": 0.5501686712940885, "grad_norm": 2.541585922241211, "learning_rate": 9.712779772851952e-05, "loss": 3.4916, "step": 62300 }, { "epoch": 0.5506102191843727, "grad_norm": 5.356276512145996, "learning_rate": 9.712315391324346e-05, 
"loss": 3.2563, "step": 62350 }, { "epoch": 0.5510517670746569, "grad_norm": 2.213956356048584, "learning_rate": 9.711850645810336e-05, "loss": 3.1959, "step": 62400 }, { "epoch": 0.5514933149649411, "grad_norm": 1.0666532516479492, "learning_rate": 9.711385536345818e-05, "loss": 3.4801, "step": 62450 }, { "epoch": 0.5519348628552253, "grad_norm": 5.453732967376709, "learning_rate": 9.71092006296672e-05, "loss": 3.1979, "step": 62500 }, { "epoch": 0.5523764107455095, "grad_norm": 6.855234622955322, "learning_rate": 9.710454225708994e-05, "loss": 3.6221, "step": 62550 }, { "epoch": 0.5528179586357936, "grad_norm": 1.6029174327850342, "learning_rate": 9.709988024608623e-05, "loss": 3.4752, "step": 62600 }, { "epoch": 0.5532595065260778, "grad_norm": 1.8403346538543701, "learning_rate": 9.709521459701616e-05, "loss": 3.4726, "step": 62650 }, { "epoch": 0.553701054416362, "grad_norm": 4.461715221405029, "learning_rate": 9.709054531024011e-05, "loss": 2.9593, "step": 62700 }, { "epoch": 0.5541426023066461, "grad_norm": 0.8452909588813782, "learning_rate": 9.708587238611877e-05, "loss": 3.2217, "step": 62750 }, { "epoch": 0.5545841501969304, "grad_norm": 1.3129770755767822, "learning_rate": 9.708119582501305e-05, "loss": 3.3426, "step": 62800 }, { "epoch": 0.5550256980872146, "grad_norm": 2.4353716373443604, "learning_rate": 9.707651562728419e-05, "loss": 3.4732, "step": 62850 }, { "epoch": 0.5554672459774987, "grad_norm": 1.2511876821517944, "learning_rate": 9.707183179329371e-05, "loss": 3.3568, "step": 62900 }, { "epoch": 0.5559087938677829, "grad_norm": 4.347702980041504, "learning_rate": 9.706714432340336e-05, "loss": 3.1255, "step": 62950 }, { "epoch": 0.5563503417580671, "grad_norm": 1.2785756587982178, "learning_rate": 9.706245321797525e-05, "loss": 3.5741, "step": 63000 }, { "epoch": 0.5563503417580671, "eval_asr_loss": 0.9045116492456787, "eval_loss": 3.014928102493286, "eval_runtime": 20.6473, "eval_samples_per_second": 37.196, "eval_steps_per_second": 9.299, 
"eval_tts_loss": 5.952768747740511, "step": 63000 }, { "epoch": 0.5567918896483512, "grad_norm": 1.4415526390075684, "learning_rate": 9.705775847737169e-05, "loss": 3.5287, "step": 63050 }, { "epoch": 0.5572334375386354, "grad_norm": 1.9535558223724365, "learning_rate": 9.705306010195533e-05, "loss": 3.3174, "step": 63100 }, { "epoch": 0.5576749854289196, "grad_norm": 1.3182282447814941, "learning_rate": 9.704835809208907e-05, "loss": 3.4299, "step": 63150 }, { "epoch": 0.5581165333192039, "grad_norm": 1.156562328338623, "learning_rate": 9.704365244813613e-05, "loss": 3.2571, "step": 63200 }, { "epoch": 0.558558081209488, "grad_norm": 1.929991364479065, "learning_rate": 9.703894317045993e-05, "loss": 3.0214, "step": 63250 }, { "epoch": 0.5589996290997722, "grad_norm": 1.8567146062850952, "learning_rate": 9.703423025942426e-05, "loss": 3.2539, "step": 63300 }, { "epoch": 0.5594411769900564, "grad_norm": 0.45442625880241394, "learning_rate": 9.702951371539315e-05, "loss": 3.5004, "step": 63350 }, { "epoch": 0.5598827248803405, "grad_norm": 2.3628077507019043, "learning_rate": 9.702479353873089e-05, "loss": 3.1837, "step": 63400 }, { "epoch": 0.5603242727706247, "grad_norm": 3.7257354259490967, "learning_rate": 9.702006972980208e-05, "loss": 3.503, "step": 63450 }, { "epoch": 0.5607658206609089, "grad_norm": 3.1037049293518066, "learning_rate": 9.701534228897163e-05, "loss": 3.2654, "step": 63500 }, { "epoch": 0.561207368551193, "grad_norm": 2.567664861679077, "learning_rate": 9.701061121660464e-05, "loss": 3.3444, "step": 63550 }, { "epoch": 0.5616489164414773, "grad_norm": 3.259284019470215, "learning_rate": 9.700587651306658e-05, "loss": 3.0933, "step": 63600 }, { "epoch": 0.5620904643317615, "grad_norm": 3.6262829303741455, "learning_rate": 9.700113817872317e-05, "loss": 3.4273, "step": 63650 }, { "epoch": 0.5625320122220456, "grad_norm": 2.7692084312438965, "learning_rate": 9.699639621394039e-05, "loss": 3.5773, "step": 63700 }, { "epoch": 0.5629735601123298, 
"grad_norm": 2.4339802265167236, "learning_rate": 9.699165061908451e-05, "loss": 2.8369, "step": 63750 }, { "epoch": 0.563415108002614, "grad_norm": 1.9642480611801147, "learning_rate": 9.69869013945221e-05, "loss": 3.533, "step": 63800 }, { "epoch": 0.5638566558928981, "grad_norm": 4.266359329223633, "learning_rate": 9.698214854062e-05, "loss": 3.3043, "step": 63850 }, { "epoch": 0.5642982037831823, "grad_norm": 1.5324000120162964, "learning_rate": 9.697739205774532e-05, "loss": 3.6485, "step": 63900 }, { "epoch": 0.5647397516734665, "grad_norm": 3.5735909938812256, "learning_rate": 9.6972727184053e-05, "loss": 3.3017, "step": 63950 }, { "epoch": 0.5651812995637507, "grad_norm": 1.4858098030090332, "learning_rate": 9.696796351689678e-05, "loss": 3.4435, "step": 64000 }, { "epoch": 0.5656228474540349, "grad_norm": 3.1134603023529053, "learning_rate": 9.696319622186367e-05, "loss": 2.8183, "step": 64050 }, { "epoch": 0.5660643953443191, "grad_norm": 0.87113356590271, "learning_rate": 9.695842529932186e-05, "loss": 3.5863, "step": 64100 }, { "epoch": 0.5665059432346032, "grad_norm": 2.494359016418457, "learning_rate": 9.695365074963992e-05, "loss": 3.2139, "step": 64150 }, { "epoch": 0.5669474911248874, "grad_norm": 4.142000198364258, "learning_rate": 9.694887257318659e-05, "loss": 3.4118, "step": 64200 }, { "epoch": 0.5673890390151716, "grad_norm": 7.738039493560791, "learning_rate": 9.694409077033097e-05, "loss": 3.3375, "step": 64250 }, { "epoch": 0.5678305869054557, "grad_norm": 2.2657651901245117, "learning_rate": 9.693930534144243e-05, "loss": 3.5303, "step": 64300 }, { "epoch": 0.5682721347957399, "grad_norm": 1.5491681098937988, "learning_rate": 9.693451628689059e-05, "loss": 3.1383, "step": 64350 }, { "epoch": 0.5687136826860242, "grad_norm": 5.7371745109558105, "learning_rate": 9.692972360704534e-05, "loss": 3.3374, "step": 64400 }, { "epoch": 0.5691552305763083, "grad_norm": 1.6547493934631348, "learning_rate": 9.69249273022769e-05, "loss": 3.5325, "step": 
64450 }, { "epoch": 0.5695967784665925, "grad_norm": 4.246345043182373, "learning_rate": 9.692012737295574e-05, "loss": 3.487, "step": 64500 }, { "epoch": 0.5700383263568767, "grad_norm": 4.030117988586426, "learning_rate": 9.69153238194526e-05, "loss": 3.0569, "step": 64550 }, { "epoch": 0.5704798742471608, "grad_norm": 1.0426106452941895, "learning_rate": 9.691051664213855e-05, "loss": 3.3855, "step": 64600 }, { "epoch": 0.570921422137445, "grad_norm": 1.773262619972229, "learning_rate": 9.690570584138486e-05, "loss": 3.0886, "step": 64650 }, { "epoch": 0.5713629700277292, "grad_norm": 2.984727144241333, "learning_rate": 9.690089141756316e-05, "loss": 3.3807, "step": 64700 }, { "epoch": 0.5718045179180133, "grad_norm": 2.3156557083129883, "learning_rate": 9.689607337104528e-05, "loss": 3.3806, "step": 64750 }, { "epoch": 0.5722460658082976, "grad_norm": 3.1280009746551514, "learning_rate": 9.689125170220341e-05, "loss": 3.223, "step": 64800 }, { "epoch": 0.5726876136985818, "grad_norm": 1.362710952758789, "learning_rate": 9.688642641140999e-05, "loss": 2.7777, "step": 64850 }, { "epoch": 0.573129161588866, "grad_norm": 2.129289150238037, "learning_rate": 9.68815974990377e-05, "loss": 3.2796, "step": 64900 }, { "epoch": 0.5735707094791501, "grad_norm": 3.2772629261016846, "learning_rate": 9.687676496545955e-05, "loss": 3.4964, "step": 64950 }, { "epoch": 0.5740122573694343, "grad_norm": 2.643249034881592, "learning_rate": 9.68719288110488e-05, "loss": 3.6755, "step": 65000 }, { "epoch": 0.5744538052597185, "grad_norm": 1.4029085636138916, "learning_rate": 9.686708903617902e-05, "loss": 3.1452, "step": 65050 }, { "epoch": 0.5748953531500026, "grad_norm": 0.7895709276199341, "learning_rate": 9.686224564122403e-05, "loss": 3.233, "step": 65100 }, { "epoch": 0.5753369010402868, "grad_norm": 1.5120134353637695, "learning_rate": 9.685739862655793e-05, "loss": 3.2801, "step": 65150 }, { "epoch": 0.5757784489305711, "grad_norm": 1.919240117073059, "learning_rate": 
9.685254799255517e-05, "loss": 3.3301, "step": 65200 }, { "epoch": 0.5762199968208552, "grad_norm": 2.2543394565582275, "learning_rate": 9.684769373959033e-05, "loss": 2.9593, "step": 65250 }, { "epoch": 0.5766615447111394, "grad_norm": 1.0832370519638062, "learning_rate": 9.684283586803843e-05, "loss": 3.6906, "step": 65300 }, { "epoch": 0.5771030926014236, "grad_norm": 3.100877285003662, "learning_rate": 9.683797437827466e-05, "loss": 3.347, "step": 65350 }, { "epoch": 0.5775446404917077, "grad_norm": 3.069957733154297, "learning_rate": 9.683310927067455e-05, "loss": 3.0518, "step": 65400 }, { "epoch": 0.5779861883819919, "grad_norm": 1.4712010622024536, "learning_rate": 9.682824054561389e-05, "loss": 3.2539, "step": 65450 }, { "epoch": 0.5784277362722761, "grad_norm": 1.5608115196228027, "learning_rate": 9.682336820346874e-05, "loss": 3.4519, "step": 65500 }, { "epoch": 0.5788692841625602, "grad_norm": 2.5109148025512695, "learning_rate": 9.681849224461544e-05, "loss": 3.3817, "step": 65550 }, { "epoch": 0.5793108320528445, "grad_norm": 3.372307300567627, "learning_rate": 9.68136126694306e-05, "loss": 3.4769, "step": 65600 }, { "epoch": 0.5797523799431287, "grad_norm": 2.304180860519409, "learning_rate": 9.680872947829118e-05, "loss": 3.8492, "step": 65650 }, { "epoch": 0.5801939278334128, "grad_norm": 0.9315201044082642, "learning_rate": 9.680384267157434e-05, "loss": 3.7529, "step": 65700 }, { "epoch": 0.580635475723697, "grad_norm": 1.882952332496643, "learning_rate": 9.679895224965752e-05, "loss": 3.0259, "step": 65750 }, { "epoch": 0.5810770236139812, "grad_norm": 3.846442222595215, "learning_rate": 9.679405821291849e-05, "loss": 3.0785, "step": 65800 }, { "epoch": 0.5815185715042653, "grad_norm": 1.7225450277328491, "learning_rate": 9.678916056173526e-05, "loss": 3.5125, "step": 65850 }, { "epoch": 0.5819601193945495, "grad_norm": 0.8811081051826477, "learning_rate": 9.678425929648614e-05, "loss": 3.0916, "step": 65900 }, { "epoch": 0.5824016672848337, 
"grad_norm": 3.4488401412963867, "learning_rate": 9.67793544175497e-05, "loss": 3.0175, "step": 65950 }, { "epoch": 0.582843215175118, "grad_norm": 2.16182804107666, "learning_rate": 9.677444592530483e-05, "loss": 3.6373, "step": 66000 }, { "epoch": 0.582843215175118, "eval_asr_loss": 0.8874591935323445, "eval_loss": 3.00752854347229, "eval_runtime": 21.0653, "eval_samples_per_second": 36.458, "eval_steps_per_second": 9.115, "eval_tts_loss": 5.963853557317323, "step": 66000 }, { "epoch": 0.5832847630654021, "grad_norm": 2.242957353591919, "learning_rate": 9.676953382013063e-05, "loss": 3.0162, "step": 66050 }, { "epoch": 0.5837263109556863, "grad_norm": 2.5106394290924072, "learning_rate": 9.676461810240654e-05, "loss": 3.4729, "step": 66100 }, { "epoch": 0.5841678588459704, "grad_norm": 3.770650863647461, "learning_rate": 9.675969877251225e-05, "loss": 3.4405, "step": 66150 }, { "epoch": 0.5846094067362546, "grad_norm": 2.2024853229522705, "learning_rate": 9.675477583082775e-05, "loss": 3.5091, "step": 66200 }, { "epoch": 0.5850509546265388, "grad_norm": 5.273355007171631, "learning_rate": 9.674984927773328e-05, "loss": 3.5578, "step": 66250 }, { "epoch": 0.5854925025168229, "grad_norm": 2.3930509090423584, "learning_rate": 9.674491911360939e-05, "loss": 3.269, "step": 66300 }, { "epoch": 0.5859340504071071, "grad_norm": 0.5656553506851196, "learning_rate": 9.673998533883687e-05, "loss": 3.4693, "step": 66350 }, { "epoch": 0.5863755982973914, "grad_norm": 1.1394141912460327, "learning_rate": 9.673504795379683e-05, "loss": 3.4782, "step": 66400 }, { "epoch": 0.5868171461876756, "grad_norm": 2.660053253173828, "learning_rate": 9.673010695887064e-05, "loss": 3.5723, "step": 66450 }, { "epoch": 0.5872586940779597, "grad_norm": 2.931849718093872, "learning_rate": 9.672516235443994e-05, "loss": 3.0214, "step": 66500 }, { "epoch": 0.5877002419682439, "grad_norm": 3.5768563747406006, "learning_rate": 9.672021414088667e-05, "loss": 3.0214, "step": 66550 }, { "epoch": 
0.588141789858528, "grad_norm": 6.038229465484619, "learning_rate": 9.671526231859305e-05, "loss": 3.4275, "step": 66600 }, { "epoch": 0.5885833377488122, "grad_norm": 3.714881181716919, "learning_rate": 9.671030688794153e-05, "loss": 2.9659, "step": 66650 }, { "epoch": 0.5890248856390964, "grad_norm": 1.5607867240905762, "learning_rate": 9.670544706544311e-05, "loss": 3.4904, "step": 66700 }, { "epoch": 0.5894664335293806, "grad_norm": 1.8332650661468506, "learning_rate": 9.67004844913725e-05, "loss": 3.0832, "step": 66750 }, { "epoch": 0.5899079814196648, "grad_norm": 2.306621789932251, "learning_rate": 9.669551831008545e-05, "loss": 3.1016, "step": 66800 }, { "epoch": 0.590349529309949, "grad_norm": 1.265374779701233, "learning_rate": 9.66905485219656e-05, "loss": 3.3778, "step": 66850 }, { "epoch": 0.5907910772002332, "grad_norm": 2.479994773864746, "learning_rate": 9.66855751273968e-05, "loss": 3.3998, "step": 66900 }, { "epoch": 0.5912326250905173, "grad_norm": 2.6619317531585693, "learning_rate": 9.66805981267632e-05, "loss": 3.0006, "step": 66950 }, { "epoch": 0.5916741729808015, "grad_norm": 2.5897438526153564, "learning_rate": 9.667561752044922e-05, "loss": 3.6474, "step": 67000 }, { "epoch": 0.5921157208710857, "grad_norm": 5.512159824371338, "learning_rate": 9.667063330883961e-05, "loss": 3.457, "step": 67050 }, { "epoch": 0.5925572687613698, "grad_norm": 2.0321197509765625, "learning_rate": 9.666564549231931e-05, "loss": 3.653, "step": 67100 }, { "epoch": 0.592998816651654, "grad_norm": 2.648627519607544, "learning_rate": 9.666065407127361e-05, "loss": 2.8554, "step": 67150 }, { "epoch": 0.5934403645419383, "grad_norm": 2.1089653968811035, "learning_rate": 9.665565904608806e-05, "loss": 3.0816, "step": 67200 }, { "epoch": 0.5938819124322224, "grad_norm": 0.6567474007606506, "learning_rate": 9.665066041714849e-05, "loss": 3.4614, "step": 67250 }, { "epoch": 0.5943234603225066, "grad_norm": 4.268828392028809, "learning_rate": 9.664565818484097e-05, 
"loss": 3.1899, "step": 67300 }, { "epoch": 0.5947650082127908, "grad_norm": 4.196305751800537, "learning_rate": 9.664065234955191e-05, "loss": 3.3985, "step": 67350 }, { "epoch": 0.5952065561030749, "grad_norm": 4.905257225036621, "learning_rate": 9.663564291166795e-05, "loss": 3.3829, "step": 67400 }, { "epoch": 0.5956481039933591, "grad_norm": 3.6008496284484863, "learning_rate": 9.663062987157604e-05, "loss": 2.946, "step": 67450 }, { "epoch": 0.5960896518836433, "grad_norm": 1.2707490921020508, "learning_rate": 9.66256132296634e-05, "loss": 3.4213, "step": 67500 }, { "epoch": 0.5965311997739274, "grad_norm": 3.5220985412597656, "learning_rate": 9.662059298631749e-05, "loss": 3.0964, "step": 67550 }, { "epoch": 0.5969727476642117, "grad_norm": 1.3247735500335693, "learning_rate": 9.66155691419261e-05, "loss": 3.4796, "step": 67600 }, { "epoch": 0.5974142955544959, "grad_norm": 1.086634874343872, "learning_rate": 9.66105416968773e-05, "loss": 3.4287, "step": 67650 }, { "epoch": 0.59785584344478, "grad_norm": 3.1261913776397705, "learning_rate": 9.660551065155938e-05, "loss": 3.239, "step": 67700 }, { "epoch": 0.5982973913350642, "grad_norm": 5.029034614562988, "learning_rate": 9.660047600636099e-05, "loss": 2.8555, "step": 67750 }, { "epoch": 0.5987389392253484, "grad_norm": 2.428929328918457, "learning_rate": 9.659543776167097e-05, "loss": 3.331, "step": 67800 }, { "epoch": 0.5991804871156325, "grad_norm": 2.1692111492156982, "learning_rate": 9.65903959178785e-05, "loss": 3.2252, "step": 67850 }, { "epoch": 0.5996220350059167, "grad_norm": 5.579278945922852, "learning_rate": 9.658535047537303e-05, "loss": 3.2612, "step": 67900 }, { "epoch": 0.6000635828962009, "grad_norm": 1.7974921464920044, "learning_rate": 9.658030143454426e-05, "loss": 3.3738, "step": 67950 }, { "epoch": 0.6005051307864852, "grad_norm": 2.329169750213623, "learning_rate": 9.657524879578221e-05, "loss": 3.358, "step": 68000 }, { "epoch": 0.6009466786767693, "grad_norm": 2.0072598457336426, 
"learning_rate": 9.657019255947712e-05, "loss": 3.1542, "step": 68050 }, { "epoch": 0.6013882265670535, "grad_norm": 1.212756872177124, "learning_rate": 9.656513272601957e-05, "loss": 2.8526, "step": 68100 }, { "epoch": 0.6018297744573377, "grad_norm": 2.1172633171081543, "learning_rate": 9.656006929580036e-05, "loss": 3.3746, "step": 68150 }, { "epoch": 0.6022713223476218, "grad_norm": 3.0865988731384277, "learning_rate": 9.655500226921064e-05, "loss": 3.4431, "step": 68200 }, { "epoch": 0.602712870237906, "grad_norm": 2.7705607414245605, "learning_rate": 9.654993164664175e-05, "loss": 3.3185, "step": 68250 }, { "epoch": 0.6031544181281902, "grad_norm": 2.9133379459381104, "learning_rate": 9.654485742848538e-05, "loss": 3.2805, "step": 68300 }, { "epoch": 0.6035959660184743, "grad_norm": 1.945061445236206, "learning_rate": 9.653977961513347e-05, "loss": 3.5587, "step": 68350 }, { "epoch": 0.6040375139087586, "grad_norm": 0.5719209313392639, "learning_rate": 9.653469820697822e-05, "loss": 3.039, "step": 68400 }, { "epoch": 0.6044790617990428, "grad_norm": 2.7784836292266846, "learning_rate": 9.652961320441214e-05, "loss": 3.6163, "step": 68450 }, { "epoch": 0.6049206096893269, "grad_norm": 1.212082028388977, "learning_rate": 9.652452460782799e-05, "loss": 2.763, "step": 68500 }, { "epoch": 0.6053621575796111, "grad_norm": 6.158472061157227, "learning_rate": 9.651943241761884e-05, "loss": 3.4118, "step": 68550 }, { "epoch": 0.6058037054698953, "grad_norm": 1.3418790102005005, "learning_rate": 9.6514336634178e-05, "loss": 3.3112, "step": 68600 }, { "epoch": 0.6062452533601794, "grad_norm": 2.3166797161102295, "learning_rate": 9.65092372578991e-05, "loss": 3.3965, "step": 68650 }, { "epoch": 0.6066868012504636, "grad_norm": 2.7341291904449463, "learning_rate": 9.6504134289176e-05, "loss": 3.4362, "step": 68700 }, { "epoch": 0.6071283491407478, "grad_norm": 5.785979270935059, "learning_rate": 9.649902772840287e-05, "loss": 3.5519, "step": 68750 }, { "epoch": 
0.607569897031032, "grad_norm": 1.710016131401062, "learning_rate": 9.649391757597414e-05, "loss": 3.8108, "step": 68800 }, { "epoch": 0.6080114449213162, "grad_norm": 1.7782738208770752, "learning_rate": 9.648880383228455e-05, "loss": 3.0538, "step": 68850 }, { "epoch": 0.6084529928116004, "grad_norm": 2.464695930480957, "learning_rate": 9.648368649772907e-05, "loss": 3.4832, "step": 68900 }, { "epoch": 0.6088945407018845, "grad_norm": 2.7873873710632324, "learning_rate": 9.647856557270299e-05, "loss": 3.5608, "step": 68950 }, { "epoch": 0.6093360885921687, "grad_norm": 2.8136725425720215, "learning_rate": 9.647344105760183e-05, "loss": 2.8681, "step": 69000 }, { "epoch": 0.6093360885921687, "eval_asr_loss": 0.9085685970427385, "eval_loss": 3.00970458984375, "eval_runtime": 20.9691, "eval_samples_per_second": 36.625, "eval_steps_per_second": 9.156, "eval_tts_loss": 5.906105015571997, "step": 69000 }, { "epoch": 0.6097776364824529, "grad_norm": 1.4982023239135742, "learning_rate": 9.646831295282143e-05, "loss": 3.7657, "step": 69050 }, { "epoch": 0.610219184372737, "grad_norm": 1.8872673511505127, "learning_rate": 9.646318125875791e-05, "loss": 3.1832, "step": 69100 }, { "epoch": 0.6106607322630212, "grad_norm": 3.1537160873413086, "learning_rate": 9.645804597580761e-05, "loss": 3.028, "step": 69150 }, { "epoch": 0.6111022801533055, "grad_norm": 2.5125792026519775, "learning_rate": 9.645290710436722e-05, "loss": 3.2232, "step": 69200 }, { "epoch": 0.6115438280435896, "grad_norm": 2.1751527786254883, "learning_rate": 9.644776464483368e-05, "loss": 3.1004, "step": 69250 }, { "epoch": 0.6119853759338738, "grad_norm": 1.4385361671447754, "learning_rate": 9.644261859760417e-05, "loss": 2.8845, "step": 69300 }, { "epoch": 0.612426923824158, "grad_norm": 1.0426270961761475, "learning_rate": 9.643746896307622e-05, "loss": 3.3967, "step": 69350 }, { "epoch": 0.6128684717144421, "grad_norm": 5.886585712432861, "learning_rate": 9.643231574164755e-05, "loss": 3.2875, "step": 
69400 }, { "epoch": 0.6133100196047263, "grad_norm": 3.5066511631011963, "learning_rate": 9.642715893371622e-05, "loss": 2.8433, "step": 69450 }, { "epoch": 0.6137515674950105, "grad_norm": 2.092880964279175, "learning_rate": 9.642199853968055e-05, "loss": 3.2078, "step": 69500 }, { "epoch": 0.6141931153852946, "grad_norm": 1.670851707458496, "learning_rate": 9.641683455993916e-05, "loss": 3.0111, "step": 69550 }, { "epoch": 0.6146346632755789, "grad_norm": 3.2171874046325684, "learning_rate": 9.641177038132528e-05, "loss": 2.9596, "step": 69600 }, { "epoch": 0.6150762111658631, "grad_norm": 1.39114248752594, "learning_rate": 9.640659930306355e-05, "loss": 3.2125, "step": 69650 }, { "epoch": 0.6155177590561473, "grad_norm": 0.8414401412010193, "learning_rate": 9.640142464028551e-05, "loss": 3.3546, "step": 69700 }, { "epoch": 0.6159593069464314, "grad_norm": 2.521662473678589, "learning_rate": 9.639624639339092e-05, "loss": 3.2137, "step": 69750 }, { "epoch": 0.6164008548367156, "grad_norm": 2.6045455932617188, "learning_rate": 9.63910645627797e-05, "loss": 3.4016, "step": 69800 }, { "epoch": 0.6168424027269998, "grad_norm": 2.437931537628174, "learning_rate": 9.638587914885215e-05, "loss": 3.0942, "step": 69850 }, { "epoch": 0.6172839506172839, "grad_norm": 2.744288682937622, "learning_rate": 9.638069015200877e-05, "loss": 3.4627, "step": 69900 }, { "epoch": 0.6177254985075681, "grad_norm": 8.556507110595703, "learning_rate": 9.637549757265037e-05, "loss": 2.9197, "step": 69950 }, { "epoch": 0.6181670463978524, "grad_norm": 2.708615303039551, "learning_rate": 9.637030141117803e-05, "loss": 3.4477, "step": 70000 }, { "epoch": 0.6186085942881365, "grad_norm": 2.7415497303009033, "learning_rate": 9.636510166799313e-05, "loss": 3.3546, "step": 70050 }, { "epoch": 0.6190501421784207, "grad_norm": 2.8086228370666504, "learning_rate": 9.635989834349728e-05, "loss": 3.3978, "step": 70100 }, { "epoch": 0.6194916900687049, "grad_norm": 2.983367681503296, "learning_rate": 
9.635469143809239e-05, "loss": 3.243, "step": 70150 }, { "epoch": 0.619933237958989, "grad_norm": 0.500150740146637, "learning_rate": 9.634948095218068e-05, "loss": 3.3688, "step": 70200 }, { "epoch": 0.6203747858492732, "grad_norm": 2.545722484588623, "learning_rate": 9.63442668861646e-05, "loss": 3.6102, "step": 70250 }, { "epoch": 0.6208163337395574, "grad_norm": 0.7100191712379456, "learning_rate": 9.633904924044687e-05, "loss": 3.7465, "step": 70300 }, { "epoch": 0.6212578816298415, "grad_norm": 1.9451789855957031, "learning_rate": 9.633382801543055e-05, "loss": 3.3812, "step": 70350 }, { "epoch": 0.6216994295201258, "grad_norm": 2.1257078647613525, "learning_rate": 9.632860321151892e-05, "loss": 3.0553, "step": 70400 }, { "epoch": 0.62214097741041, "grad_norm": 0.8686696290969849, "learning_rate": 9.632337482911553e-05, "loss": 2.8603, "step": 70450 }, { "epoch": 0.6225825253006941, "grad_norm": 6.291978359222412, "learning_rate": 9.631814286862426e-05, "loss": 3.1015, "step": 70500 }, { "epoch": 0.6230240731909783, "grad_norm": 3.416860818862915, "learning_rate": 9.631290733044921e-05, "loss": 3.5239, "step": 70550 }, { "epoch": 0.6234656210812625, "grad_norm": 1.6016830205917358, "learning_rate": 9.63076682149948e-05, "loss": 3.2555, "step": 70600 }, { "epoch": 0.6239071689715466, "grad_norm": 2.5389463901519775, "learning_rate": 9.630242552266569e-05, "loss": 3.5458, "step": 70650 }, { "epoch": 0.6243487168618308, "grad_norm": 2.5725655555725098, "learning_rate": 9.629717925386683e-05, "loss": 3.0217, "step": 70700 }, { "epoch": 0.624790264752115, "grad_norm": 2.941615581512451, "learning_rate": 9.629192940900348e-05, "loss": 3.2919, "step": 70750 }, { "epoch": 0.6252318126423992, "grad_norm": 2.9366567134857178, "learning_rate": 9.628667598848113e-05, "loss": 3.0738, "step": 70800 }, { "epoch": 0.6256733605326834, "grad_norm": 4.780130863189697, "learning_rate": 9.628141899270554e-05, "loss": 3.4164, "step": 70850 }, { "epoch": 0.6261149084229676, 
"grad_norm": 2.4676146507263184, "learning_rate": 9.62761584220828e-05, "loss": 2.6195, "step": 70900 }, { "epoch": 0.6265564563132517, "grad_norm": 2.815596103668213, "learning_rate": 9.627089427701923e-05, "loss": 3.1961, "step": 70950 }, { "epoch": 0.6269980042035359, "grad_norm": 1.198612093925476, "learning_rate": 9.626562655792145e-05, "loss": 3.446, "step": 71000 }, { "epoch": 0.6274395520938201, "grad_norm": 0.4727330803871155, "learning_rate": 9.626035526519632e-05, "loss": 3.252, "step": 71050 }, { "epoch": 0.6278810999841042, "grad_norm": 2.9533727169036865, "learning_rate": 9.625508039925104e-05, "loss": 3.3784, "step": 71100 }, { "epoch": 0.6283226478743884, "grad_norm": 1.3359452486038208, "learning_rate": 9.624980196049303e-05, "loss": 3.5926, "step": 71150 }, { "epoch": 0.6287641957646727, "grad_norm": 1.3383703231811523, "learning_rate": 9.624451994932999e-05, "loss": 3.4367, "step": 71200 }, { "epoch": 0.6292057436549569, "grad_norm": 3.669863224029541, "learning_rate": 9.623923436616996e-05, "loss": 3.4483, "step": 71250 }, { "epoch": 0.629647291545241, "grad_norm": 3.541435718536377, "learning_rate": 9.623394521142113e-05, "loss": 3.5803, "step": 71300 }, { "epoch": 0.6300888394355252, "grad_norm": 1.2664039134979248, "learning_rate": 9.622865248549211e-05, "loss": 3.02, "step": 71350 }, { "epoch": 0.6305303873258093, "grad_norm": 3.8231582641601562, "learning_rate": 9.622335618879168e-05, "loss": 3.2505, "step": 71400 }, { "epoch": 0.6309719352160935, "grad_norm": 1.3903136253356934, "learning_rate": 9.621805632172896e-05, "loss": 2.9738, "step": 71450 }, { "epoch": 0.6314134831063777, "grad_norm": 1.4685653448104858, "learning_rate": 9.621275288471329e-05, "loss": 3.4252, "step": 71500 }, { "epoch": 0.6318550309966618, "grad_norm": 4.989869594573975, "learning_rate": 9.620744587815435e-05, "loss": 3.2167, "step": 71550 }, { "epoch": 0.6322965788869461, "grad_norm": 1.4381548166275024, "learning_rate": 9.620213530246205e-05, "loss": 3.5436, 
"step": 71600 }, { "epoch": 0.6327381267772303, "grad_norm": 1.2252341508865356, "learning_rate": 9.619682115804656e-05, "loss": 3.0586, "step": 71650 }, { "epoch": 0.6331796746675145, "grad_norm": 4.090355396270752, "learning_rate": 9.619150344531838e-05, "loss": 3.4366, "step": 71700 }, { "epoch": 0.6336212225577986, "grad_norm": 1.2702091932296753, "learning_rate": 9.618618216468824e-05, "loss": 2.9698, "step": 71750 }, { "epoch": 0.6340627704480828, "grad_norm": 3.03657865524292, "learning_rate": 9.61808573165672e-05, "loss": 3.0061, "step": 71800 }, { "epoch": 0.634504318338367, "grad_norm": 2.344271421432495, "learning_rate": 9.617552890136652e-05, "loss": 2.6665, "step": 71850 }, { "epoch": 0.6349458662286511, "grad_norm": 1.6858032941818237, "learning_rate": 9.61701969194978e-05, "loss": 3.1685, "step": 71900 }, { "epoch": 0.6353874141189353, "grad_norm": 3.873769760131836, "learning_rate": 9.616486137137287e-05, "loss": 3.5466, "step": 71950 }, { "epoch": 0.6358289620092196, "grad_norm": 1.7075845003128052, "learning_rate": 9.615952225740385e-05, "loss": 3.3663, "step": 72000 }, { "epoch": 0.6358289620092196, "eval_asr_loss": 0.9085645562718005, "eval_loss": 3.0017716884613037, "eval_runtime": 20.9003, "eval_samples_per_second": 36.746, "eval_steps_per_second": 9.186, "eval_tts_loss": 5.94018095728795, "step": 72000 }, { "epoch": 0.6362705098995037, "grad_norm": 2.023348331451416, "learning_rate": 9.615417957800319e-05, "loss": 3.2043, "step": 72050 }, { "epoch": 0.6367120577897879, "grad_norm": 1.3778334856033325, "learning_rate": 9.614883333358351e-05, "loss": 3.2908, "step": 72100 }, { "epoch": 0.6371536056800721, "grad_norm": 2.4582135677337646, "learning_rate": 9.61434835245578e-05, "loss": 3.4521, "step": 72150 }, { "epoch": 0.6375951535703562, "grad_norm": 4.403940200805664, "learning_rate": 9.613813015133926e-05, "loss": 3.1724, "step": 72200 }, { "epoch": 0.6380367014606404, "grad_norm": 1.1220415830612183, "learning_rate": 9.613277321434141e-05, 
"loss": 3.2921, "step": 72250 }, { "epoch": 0.6384782493509246, "grad_norm": 5.00771427154541, "learning_rate": 9.612741271397802e-05, "loss": 3.4677, "step": 72300 }, { "epoch": 0.6389197972412087, "grad_norm": 2.081688404083252, "learning_rate": 9.612204865066317e-05, "loss": 2.9708, "step": 72350 }, { "epoch": 0.639361345131493, "grad_norm": 1.0858304500579834, "learning_rate": 9.611668102481114e-05, "loss": 3.3104, "step": 72400 }, { "epoch": 0.6398028930217772, "grad_norm": 4.691798686981201, "learning_rate": 9.611141729550216e-05, "loss": 3.4249, "step": 72450 }, { "epoch": 0.6402444409120613, "grad_norm": 1.6770505905151367, "learning_rate": 9.610604261705e-05, "loss": 3.3248, "step": 72500 }, { "epoch": 0.6406859888023455, "grad_norm": 0.5892982482910156, "learning_rate": 9.610066437729704e-05, "loss": 3.2216, "step": 72550 }, { "epoch": 0.6411275366926297, "grad_norm": 2.631664514541626, "learning_rate": 9.609528257665864e-05, "loss": 3.4193, "step": 72600 }, { "epoch": 0.6415690845829138, "grad_norm": 1.2707042694091797, "learning_rate": 9.608989721555055e-05, "loss": 3.3635, "step": 72650 }, { "epoch": 0.642010632473198, "grad_norm": 3.7268316745758057, "learning_rate": 9.608450829438873e-05, "loss": 3.5901, "step": 72700 }, { "epoch": 0.6424521803634822, "grad_norm": 3.723600387573242, "learning_rate": 9.607911581358943e-05, "loss": 3.157, "step": 72750 }, { "epoch": 0.6428937282537664, "grad_norm": 1.398940086364746, "learning_rate": 9.607371977356918e-05, "loss": 3.2389, "step": 72800 }, { "epoch": 0.6433352761440506, "grad_norm": 3.619192600250244, "learning_rate": 9.606832017474477e-05, "loss": 3.5136, "step": 72850 }, { "epoch": 0.6437768240343348, "grad_norm": 1.5558445453643799, "learning_rate": 9.606291701753327e-05, "loss": 3.4748, "step": 72900 }, { "epoch": 0.644218371924619, "grad_norm": 3.18674635887146, "learning_rate": 9.605751030235204e-05, "loss": 2.6554, "step": 72950 }, { "epoch": 0.6446599198149031, "grad_norm": 4.52305793762207, 
"learning_rate": 9.605210002961868e-05, "loss": 3.6155, "step": 73000 }, { "epoch": 0.6451014677051873, "grad_norm": 5.28046989440918, "learning_rate": 9.604668619975112e-05, "loss": 3.404, "step": 73050 }, { "epoch": 0.6455430155954714, "grad_norm": 2.169076442718506, "learning_rate": 9.604126881316751e-05, "loss": 3.3904, "step": 73100 }, { "epoch": 0.6459845634857556, "grad_norm": 1.4948843717575073, "learning_rate": 9.60358478702863e-05, "loss": 3.1796, "step": 73150 }, { "epoch": 0.6464261113760399, "grad_norm": 3.89113450050354, "learning_rate": 9.603042337152622e-05, "loss": 3.6965, "step": 73200 }, { "epoch": 0.6468676592663241, "grad_norm": 0.8911652565002441, "learning_rate": 9.602499531730625e-05, "loss": 3.4166, "step": 73250 }, { "epoch": 0.6473092071566082, "grad_norm": 1.3251335620880127, "learning_rate": 9.601956370804567e-05, "loss": 3.0175, "step": 73300 }, { "epoch": 0.6477507550468924, "grad_norm": 3.875401735305786, "learning_rate": 9.601412854416403e-05, "loss": 3.5598, "step": 73350 }, { "epoch": 0.6481923029371766, "grad_norm": 2.4360857009887695, "learning_rate": 9.600868982608116e-05, "loss": 3.3592, "step": 73400 }, { "epoch": 0.6486338508274607, "grad_norm": 2.4810409545898438, "learning_rate": 9.600324755421715e-05, "loss": 3.4158, "step": 73450 }, { "epoch": 0.6490753987177449, "grad_norm": 1.3619318008422852, "learning_rate": 9.599780172899234e-05, "loss": 2.9944, "step": 73500 }, { "epoch": 0.649516946608029, "grad_norm": 2.7849841117858887, "learning_rate": 9.599235235082741e-05, "loss": 3.6618, "step": 73550 }, { "epoch": 0.6499584944983133, "grad_norm": 5.122074127197266, "learning_rate": 9.598689942014326e-05, "loss": 3.0184, "step": 73600 }, { "epoch": 0.6504000423885975, "grad_norm": 2.697990655899048, "learning_rate": 9.59814429373611e-05, "loss": 3.0678, "step": 73650 }, { "epoch": 0.6508415902788817, "grad_norm": 1.3273183107376099, "learning_rate": 9.597598290290237e-05, "loss": 2.9799, "step": 73700 }, { "epoch": 
0.6512831381691658, "grad_norm": 1.5726739168167114, "learning_rate": 9.597051931718882e-05, "loss": 3.1769, "step": 73750 }, { "epoch": 0.65172468605945, "grad_norm": 2.7319064140319824, "learning_rate": 9.596505218064249e-05, "loss": 3.5969, "step": 73800 }, { "epoch": 0.6521662339497342, "grad_norm": 3.6344516277313232, "learning_rate": 9.595958149368563e-05, "loss": 3.2977, "step": 73850 }, { "epoch": 0.6526077818400183, "grad_norm": 2.6774423122406006, "learning_rate": 9.595410725674084e-05, "loss": 3.1628, "step": 73900 }, { "epoch": 0.6530493297303025, "grad_norm": 4.738192081451416, "learning_rate": 9.594862947023094e-05, "loss": 3.6289, "step": 73950 }, { "epoch": 0.6534908776205868, "grad_norm": 1.0268847942352295, "learning_rate": 9.594314813457905e-05, "loss": 3.6385, "step": 74000 }, { "epoch": 0.6539324255108709, "grad_norm": 2.7158186435699463, "learning_rate": 9.593766325020855e-05, "loss": 3.4732, "step": 74050 }, { "epoch": 0.6543739734011551, "grad_norm": 2.4069275856018066, "learning_rate": 9.593217481754311e-05, "loss": 3.493, "step": 74100 }, { "epoch": 0.6548155212914393, "grad_norm": 1.4273890256881714, "learning_rate": 9.592668283700665e-05, "loss": 3.4282, "step": 74150 }, { "epoch": 0.6552570691817234, "grad_norm": 1.4611910581588745, "learning_rate": 9.59211873090234e-05, "loss": 3.5503, "step": 74200 }, { "epoch": 0.6556986170720076, "grad_norm": 1.2271146774291992, "learning_rate": 9.591568823401782e-05, "loss": 3.081, "step": 74250 }, { "epoch": 0.6561401649622918, "grad_norm": 2.7792177200317383, "learning_rate": 9.591018561241467e-05, "loss": 3.4418, "step": 74300 }, { "epoch": 0.6565817128525759, "grad_norm": 5.776242733001709, "learning_rate": 9.5904679444639e-05, "loss": 3.5402, "step": 74350 }, { "epoch": 0.6570232607428602, "grad_norm": 2.6737334728240967, "learning_rate": 9.58991697311161e-05, "loss": 3.2469, "step": 74400 }, { "epoch": 0.6574648086331444, "grad_norm": 1.3555004596710205, "learning_rate": 
9.589365647227155e-05, "loss": 3.3199, "step": 74450 }, { "epoch": 0.6579063565234285, "grad_norm": 3.3269264698028564, "learning_rate": 9.58881396685312e-05, "loss": 3.462, "step": 74500 }, { "epoch": 0.6583479044137127, "grad_norm": 2.6994597911834717, "learning_rate": 9.58826193203212e-05, "loss": 3.3227, "step": 74550 }, { "epoch": 0.6587894523039969, "grad_norm": 3.3708934783935547, "learning_rate": 9.587709542806792e-05, "loss": 3.1164, "step": 74600 }, { "epoch": 0.659231000194281, "grad_norm": 1.3764150142669678, "learning_rate": 9.587156799219804e-05, "loss": 3.3193, "step": 74650 }, { "epoch": 0.6596725480845652, "grad_norm": 4.554634094238281, "learning_rate": 9.586603701313852e-05, "loss": 3.5422, "step": 74700 }, { "epoch": 0.6601140959748494, "grad_norm": 4.359382152557373, "learning_rate": 9.586050249131658e-05, "loss": 3.2248, "step": 74750 }, { "epoch": 0.6605556438651337, "grad_norm": 4.0737714767456055, "learning_rate": 9.585496442715968e-05, "loss": 3.3829, "step": 74800 }, { "epoch": 0.6609971917554178, "grad_norm": 2.545942544937134, "learning_rate": 9.584942282109564e-05, "loss": 3.2756, "step": 74850 }, { "epoch": 0.661438739645702, "grad_norm": 3.28908109664917, "learning_rate": 9.584387767355247e-05, "loss": 3.4446, "step": 74900 }, { "epoch": 0.6618802875359862, "grad_norm": 5.4131340980529785, "learning_rate": 9.58383289849585e-05, "loss": 2.9662, "step": 74950 }, { "epoch": 0.6623218354262703, "grad_norm": 1.5948145389556885, "learning_rate": 9.58327767557423e-05, "loss": 3.3822, "step": 75000 }, { "epoch": 0.6623218354262703, "eval_asr_loss": 0.8949383808222489, "eval_loss": 3.007831573486328, "eval_runtime": 20.8134, "eval_samples_per_second": 36.899, "eval_steps_per_second": 9.225, "eval_tts_loss": 5.953438474404635, "step": 75000 }, { "epoch": 0.6627633833165545, "grad_norm": 3.2304179668426514, "learning_rate": 9.582722098633276e-05, "loss": 3.3734, "step": 75050 }, { "epoch": 0.6632049312068387, "grad_norm": 3.123548746109009, 
"learning_rate": 9.5821661677159e-05, "loss": 3.4995, "step": 75100 }, { "epoch": 0.6636464790971228, "grad_norm": 2.3339643478393555, "learning_rate": 9.581609882865044e-05, "loss": 3.3416, "step": 75150 }, { "epoch": 0.6640880269874071, "grad_norm": 1.0405722856521606, "learning_rate": 9.58106438036635e-05, "loss": 3.3155, "step": 75200 }, { "epoch": 0.6645295748776913, "grad_norm": 3.653280258178711, "learning_rate": 9.580507394853993e-05, "loss": 2.9206, "step": 75250 }, { "epoch": 0.6649711227679754, "grad_norm": 2.6631522178649902, "learning_rate": 9.579950055536279e-05, "loss": 3.1143, "step": 75300 }, { "epoch": 0.6654126706582596, "grad_norm": 2.752356767654419, "learning_rate": 9.579392362456264e-05, "loss": 3.4541, "step": 75350 }, { "epoch": 0.6658542185485438, "grad_norm": 1.8876862525939941, "learning_rate": 9.578834315657019e-05, "loss": 2.942, "step": 75400 }, { "epoch": 0.6662957664388279, "grad_norm": 3.700727939605713, "learning_rate": 9.57827591518165e-05, "loss": 3.4877, "step": 75450 }, { "epoch": 0.6667373143291121, "grad_norm": 1.4538304805755615, "learning_rate": 9.57771716107329e-05, "loss": 3.5755, "step": 75500 }, { "epoch": 0.6671788622193963, "grad_norm": 4.2887349128723145, "learning_rate": 9.577158053375096e-05, "loss": 3.3367, "step": 75550 }, { "epoch": 0.6676204101096805, "grad_norm": 4.3281025886535645, "learning_rate": 9.576598592130257e-05, "loss": 3.5268, "step": 75600 }, { "epoch": 0.6680619579999647, "grad_norm": 2.3635294437408447, "learning_rate": 9.576038777381984e-05, "loss": 3.6769, "step": 75650 }, { "epoch": 0.6685035058902489, "grad_norm": 4.012321949005127, "learning_rate": 9.57547860917352e-05, "loss": 3.1022, "step": 75700 }, { "epoch": 0.668945053780533, "grad_norm": 1.8018879890441895, "learning_rate": 9.574918087548132e-05, "loss": 3.4172, "step": 75750 }, { "epoch": 0.6693866016708172, "grad_norm": 2.358532190322876, "learning_rate": 9.574357212549116e-05, "loss": 3.1392, "step": 75800 }, { "epoch": 
0.6698281495611014, "grad_norm": 2.4655981063842773, "learning_rate": 9.573795984219793e-05, "loss": 3.3193, "step": 75850 }, { "epoch": 0.6702696974513855, "grad_norm": 2.9911704063415527, "learning_rate": 9.573234402603517e-05, "loss": 3.563, "step": 75900 }, { "epoch": 0.6707112453416697, "grad_norm": 5.0430707931518555, "learning_rate": 9.572672467743664e-05, "loss": 3.2619, "step": 75950 }, { "epoch": 0.671152793231954, "grad_norm": 2.1028594970703125, "learning_rate": 9.572110179683637e-05, "loss": 3.6662, "step": 76000 }, { "epoch": 0.6715943411222381, "grad_norm": 2.4186270236968994, "learning_rate": 9.571547538466868e-05, "loss": 3.0861, "step": 76050 }, { "epoch": 0.6720358890125223, "grad_norm": 1.6107758283615112, "learning_rate": 9.570984544136819e-05, "loss": 3.9137, "step": 76100 }, { "epoch": 0.6724774369028065, "grad_norm": 3.140552043914795, "learning_rate": 9.570432467144772e-05, "loss": 3.643, "step": 76150 }, { "epoch": 0.6729189847930906, "grad_norm": 3.5983471870422363, "learning_rate": 9.569868773778745e-05, "loss": 3.6072, "step": 76200 }, { "epoch": 0.6733605326833748, "grad_norm": 3.1312403678894043, "learning_rate": 9.569304727429107e-05, "loss": 3.3982, "step": 76250 }, { "epoch": 0.673802080573659, "grad_norm": 1.2464442253112793, "learning_rate": 9.568740328139423e-05, "loss": 3.36, "step": 76300 }, { "epoch": 0.6742436284639431, "grad_norm": 1.5809202194213867, "learning_rate": 9.568175575953293e-05, "loss": 3.404, "step": 76350 }, { "epoch": 0.6746851763542274, "grad_norm": 2.486264944076538, "learning_rate": 9.567610470914334e-05, "loss": 3.4402, "step": 76400 }, { "epoch": 0.6751267242445116, "grad_norm": 1.804176926612854, "learning_rate": 9.5670450130662e-05, "loss": 3.2879, "step": 76450 }, { "epoch": 0.6755682721347958, "grad_norm": 1.7221146821975708, "learning_rate": 9.566479202452567e-05, "loss": 3.2473, "step": 76500 }, { "epoch": 0.6760098200250799, "grad_norm": 2.4581539630889893, "learning_rate": 9.565913039117138e-05, 
"loss": 3.7328, "step": 76550 }, { "epoch": 0.6764513679153641, "grad_norm": 1.7588177919387817, "learning_rate": 9.565346523103643e-05, "loss": 3.1056, "step": 76600 }, { "epoch": 0.6768929158056483, "grad_norm": 2.841600179672241, "learning_rate": 9.564779654455844e-05, "loss": 3.3631, "step": 76650 }, { "epoch": 0.6773344636959324, "grad_norm": 2.2738077640533447, "learning_rate": 9.564212433217523e-05, "loss": 3.4339, "step": 76700 }, { "epoch": 0.6777760115862166, "grad_norm": 1.1713546514511108, "learning_rate": 9.563644859432497e-05, "loss": 3.2702, "step": 76750 }, { "epoch": 0.6782175594765009, "grad_norm": 2.2375364303588867, "learning_rate": 9.563076933144602e-05, "loss": 3.6829, "step": 76800 }, { "epoch": 0.678659107366785, "grad_norm": 1.015380620956421, "learning_rate": 9.562508654397708e-05, "loss": 3.0635, "step": 76850 }, { "epoch": 0.6791006552570692, "grad_norm": 0.6230117082595825, "learning_rate": 9.56194002323571e-05, "loss": 3.5006, "step": 76900 }, { "epoch": 0.6795422031473534, "grad_norm": 1.3702956438064575, "learning_rate": 9.56137103970253e-05, "loss": 3.107, "step": 76950 }, { "epoch": 0.6799837510376375, "grad_norm": 1.878818392753601, "learning_rate": 9.560801703842117e-05, "loss": 3.6604, "step": 77000 }, { "epoch": 0.6804252989279217, "grad_norm": 1.3559465408325195, "learning_rate": 9.560232015698444e-05, "loss": 3.634, "step": 77050 }, { "epoch": 0.6808668468182059, "grad_norm": 15.994784355163574, "learning_rate": 9.559661975315519e-05, "loss": 3.4924, "step": 77100 }, { "epoch": 0.68130839470849, "grad_norm": 0.9013010859489441, "learning_rate": 9.559091582737372e-05, "loss": 3.474, "step": 77150 }, { "epoch": 0.6817499425987743, "grad_norm": 1.3329524993896484, "learning_rate": 9.558520838008059e-05, "loss": 3.4002, "step": 77200 }, { "epoch": 0.6821914904890585, "grad_norm": 2.5802180767059326, "learning_rate": 9.557949741171667e-05, "loss": 3.1695, "step": 77250 }, { "epoch": 0.6826330383793426, "grad_norm": 
2.464118242263794, "learning_rate": 9.557378292272307e-05, "loss": 3.337, "step": 77300 }, { "epoch": 0.6830745862696268, "grad_norm": 1.7328948974609375, "learning_rate": 9.556806491354121e-05, "loss": 3.085, "step": 77350 }, { "epoch": 0.683516134159911, "grad_norm": 1.6609792709350586, "learning_rate": 9.556234338461274e-05, "loss": 3.7844, "step": 77400 }, { "epoch": 0.6839576820501951, "grad_norm": 3.412128210067749, "learning_rate": 9.55566183363796e-05, "loss": 3.3013, "step": 77450 }, { "epoch": 0.6843992299404793, "grad_norm": 1.4039990901947021, "learning_rate": 9.555088976928399e-05, "loss": 3.2333, "step": 77500 }, { "epoch": 0.6848407778307635, "grad_norm": 1.7826491594314575, "learning_rate": 9.55451576837684e-05, "loss": 3.4012, "step": 77550 }, { "epoch": 0.6852823257210477, "grad_norm": 3.7740890979766846, "learning_rate": 9.553942208027559e-05, "loss": 3.2525, "step": 77600 }, { "epoch": 0.6857238736113319, "grad_norm": 3.7978830337524414, "learning_rate": 9.553368295924861e-05, "loss": 3.3832, "step": 77650 }, { "epoch": 0.6861654215016161, "grad_norm": 1.697966456413269, "learning_rate": 9.552794032113073e-05, "loss": 3.2114, "step": 77700 }, { "epoch": 0.6866069693919002, "grad_norm": 1.4913134574890137, "learning_rate": 9.552219416636552e-05, "loss": 3.1685, "step": 77750 }, { "epoch": 0.6870485172821844, "grad_norm": 2.034682512283325, "learning_rate": 9.551644449539683e-05, "loss": 2.8857, "step": 77800 }, { "epoch": 0.6874900651724686, "grad_norm": 1.9270625114440918, "learning_rate": 9.551069130866877e-05, "loss": 3.5445, "step": 77850 }, { "epoch": 0.6879316130627527, "grad_norm": 5.204434871673584, "learning_rate": 9.550493460662572e-05, "loss": 3.3383, "step": 77900 }, { "epoch": 0.6883731609530369, "grad_norm": 3.010862350463867, "learning_rate": 9.549917438971235e-05, "loss": 3.3101, "step": 77950 }, { "epoch": 0.6888147088433212, "grad_norm": 0.8644163608551025, "learning_rate": 9.549341065837357e-05, "loss": 3.3201, "step": 78000 }, 
{ "epoch": 0.6888147088433212, "eval_asr_loss": 0.8925247313395483, "eval_loss": 2.992865800857544, "eval_runtime": 20.774, "eval_samples_per_second": 36.969, "eval_steps_per_second": 9.242, "eval_tts_loss": 5.960387531532541, "step": 78000 }, { "epoch": 0.6892562567336054, "grad_norm": 2.4011969566345215, "learning_rate": 9.54876434130546e-05, "loss": 3.9292, "step": 78050 }, { "epoch": 0.6896978046238895, "grad_norm": 3.125196695327759, "learning_rate": 9.54818726542009e-05, "loss": 3.247, "step": 78100 }, { "epoch": 0.6901393525141737, "grad_norm": 1.368432879447937, "learning_rate": 9.547609838225821e-05, "loss": 2.951, "step": 78150 }, { "epoch": 0.6905809004044579, "grad_norm": 2.4032599925994873, "learning_rate": 9.547032059767253e-05, "loss": 3.5267, "step": 78200 }, { "epoch": 0.691022448294742, "grad_norm": 2.1227686405181885, "learning_rate": 9.546453930089019e-05, "loss": 3.2206, "step": 78250 }, { "epoch": 0.6914639961850262, "grad_norm": 1.2852637767791748, "learning_rate": 9.54587544923577e-05, "loss": 3.4086, "step": 78300 }, { "epoch": 0.6919055440753105, "grad_norm": 1.241897463798523, "learning_rate": 9.54529661725219e-05, "loss": 3.1159, "step": 78350 }, { "epoch": 0.6923470919655946, "grad_norm": 1.472129225730896, "learning_rate": 9.544717434182991e-05, "loss": 3.2442, "step": 78400 }, { "epoch": 0.6927886398558788, "grad_norm": 2.743114471435547, "learning_rate": 9.544137900072907e-05, "loss": 3.2978, "step": 78450 }, { "epoch": 0.693230187746163, "grad_norm": 2.720283031463623, "learning_rate": 9.543558014966703e-05, "loss": 3.5339, "step": 78500 }, { "epoch": 0.6936717356364471, "grad_norm": 2.2413861751556396, "learning_rate": 9.542977778909173e-05, "loss": 3.6654, "step": 78550 }, { "epoch": 0.6941132835267313, "grad_norm": 0.8887834548950195, "learning_rate": 9.542397191945129e-05, "loss": 2.9958, "step": 78600 }, { "epoch": 0.6945548314170155, "grad_norm": 1.2447916269302368, "learning_rate": 9.541816254119425e-05, "loss": 3.0432, 
"step": 78650 }, { "epoch": 0.6949963793072996, "grad_norm": 2.320293664932251, "learning_rate": 9.541234965476925e-05, "loss": 3.4944, "step": 78700 }, { "epoch": 0.6954379271975839, "grad_norm": 2.7362334728240967, "learning_rate": 9.540653326062534e-05, "loss": 3.4683, "step": 78750 }, { "epoch": 0.6958794750878681, "grad_norm": 1.2431176900863647, "learning_rate": 9.540071335921176e-05, "loss": 3.2625, "step": 78800 }, { "epoch": 0.6963210229781522, "grad_norm": 2.1052627563476562, "learning_rate": 9.539488995097807e-05, "loss": 3.3472, "step": 78850 }, { "epoch": 0.6967625708684364, "grad_norm": 2.4554333686828613, "learning_rate": 9.538906303637407e-05, "loss": 3.327, "step": 78900 }, { "epoch": 0.6972041187587206, "grad_norm": 2.1302428245544434, "learning_rate": 9.538323261584985e-05, "loss": 3.4227, "step": 78950 }, { "epoch": 0.6976456666490047, "grad_norm": 1.9906952381134033, "learning_rate": 9.537739868985574e-05, "loss": 3.8786, "step": 79000 }, { "epoch": 0.6980872145392889, "grad_norm": 4.157954216003418, "learning_rate": 9.537156125884236e-05, "loss": 3.0816, "step": 79050 }, { "epoch": 0.6985287624295731, "grad_norm": 4.053858757019043, "learning_rate": 9.536572032326061e-05, "loss": 3.2263, "step": 79100 }, { "epoch": 0.6989703103198573, "grad_norm": 1.9072597026824951, "learning_rate": 9.535987588356168e-05, "loss": 3.4913, "step": 79150 }, { "epoch": 0.6994118582101415, "grad_norm": 4.11290168762207, "learning_rate": 9.535402794019696e-05, "loss": 3.5914, "step": 79200 }, { "epoch": 0.6998534061004257, "grad_norm": 3.6236138343811035, "learning_rate": 9.534817649361816e-05, "loss": 3.0452, "step": 79250 }, { "epoch": 0.7002949539907098, "grad_norm": 4.064155578613281, "learning_rate": 9.534232154427728e-05, "loss": 3.5526, "step": 79300 }, { "epoch": 0.700736501880994, "grad_norm": 2.754270553588867, "learning_rate": 9.533646309262657e-05, "loss": 3.3366, "step": 79350 }, { "epoch": 0.7011780497712782, "grad_norm": 2.6481540203094482, 
"learning_rate": 9.533060113911852e-05, "loss": 3.6007, "step": 79400 }, { "epoch": 0.7016195976615623, "grad_norm": 3.322692632675171, "learning_rate": 9.532473568420591e-05, "loss": 3.1894, "step": 79450 }, { "epoch": 0.7020611455518465, "grad_norm": 2.9386980533599854, "learning_rate": 9.531886672834182e-05, "loss": 3.1106, "step": 79500 }, { "epoch": 0.7025026934421308, "grad_norm": 1.7416975498199463, "learning_rate": 9.531299427197957e-05, "loss": 3.5195, "step": 79550 }, { "epoch": 0.702944241332415, "grad_norm": 2.034696578979492, "learning_rate": 9.530711831557274e-05, "loss": 3.3598, "step": 79600 }, { "epoch": 0.7033857892226991, "grad_norm": 2.048043966293335, "learning_rate": 9.530123885957523e-05, "loss": 3.3274, "step": 79650 }, { "epoch": 0.7038273371129833, "grad_norm": 0.8644744157791138, "learning_rate": 9.529535590444115e-05, "loss": 3.3067, "step": 79700 }, { "epoch": 0.7042688850032675, "grad_norm": 3.338059186935425, "learning_rate": 9.528946945062493e-05, "loss": 3.2878, "step": 79750 }, { "epoch": 0.7047104328935516, "grad_norm": 1.424256682395935, "learning_rate": 9.528357949858123e-05, "loss": 3.5114, "step": 79800 }, { "epoch": 0.7051519807838358, "grad_norm": 3.7967846393585205, "learning_rate": 9.527768604876501e-05, "loss": 3.4413, "step": 79850 }, { "epoch": 0.70559352867412, "grad_norm": 3.256422996520996, "learning_rate": 9.52717891016315e-05, "loss": 3.0054, "step": 79900 }, { "epoch": 0.7060350765644042, "grad_norm": 3.2881686687469482, "learning_rate": 9.526588865763614e-05, "loss": 3.3926, "step": 79950 }, { "epoch": 0.7064766244546884, "grad_norm": 2.3848628997802734, "learning_rate": 9.525998471723476e-05, "loss": 3.3552, "step": 80000 }, { "epoch": 0.7069181723449726, "grad_norm": 3.0554099082946777, "learning_rate": 9.525407728088333e-05, "loss": 3.4239, "step": 80050 }, { "epoch": 0.7073597202352567, "grad_norm": 2.6418068408966064, "learning_rate": 9.524816634903819e-05, "loss": 3.2979, "step": 80100 }, { "epoch": 
0.7078012681255409, "grad_norm": 1.501378059387207, "learning_rate": 9.52422519221559e-05, "loss": 3.7132, "step": 80150 }, { "epoch": 0.7082428160158251, "grad_norm": 3.162963390350342, "learning_rate": 9.523633400069327e-05, "loss": 3.7012, "step": 80200 }, { "epoch": 0.7086843639061092, "grad_norm": 0.6245143413543701, "learning_rate": 9.523041258510746e-05, "loss": 3.423, "step": 80250 }, { "epoch": 0.7091259117963934, "grad_norm": 0.8148535490036011, "learning_rate": 9.522448767585579e-05, "loss": 3.3979, "step": 80300 }, { "epoch": 0.7095674596866777, "grad_norm": 2.1384999752044678, "learning_rate": 9.521855927339595e-05, "loss": 2.7129, "step": 80350 }, { "epoch": 0.7100090075769618, "grad_norm": 3.697734832763672, "learning_rate": 9.521262737818586e-05, "loss": 3.0784, "step": 80400 }, { "epoch": 0.710450555467246, "grad_norm": 6.074985027313232, "learning_rate": 9.520669199068368e-05, "loss": 3.4716, "step": 80450 }, { "epoch": 0.7108921033575302, "grad_norm": 3.228442907333374, "learning_rate": 9.520075311134788e-05, "loss": 3.3426, "step": 80500 }, { "epoch": 0.7113336512478143, "grad_norm": 1.2277064323425293, "learning_rate": 9.51948107406372e-05, "loss": 3.2555, "step": 80550 }, { "epoch": 0.7117751991380985, "grad_norm": 0.9551520943641663, "learning_rate": 9.518886487901065e-05, "loss": 3.5223, "step": 80600 }, { "epoch": 0.7122167470283827, "grad_norm": 2.2914512157440186, "learning_rate": 9.518291552692746e-05, "loss": 2.9172, "step": 80650 }, { "epoch": 0.7126582949186668, "grad_norm": 3.3304460048675537, "learning_rate": 9.517696268484719e-05, "loss": 2.6855, "step": 80700 }, { "epoch": 0.7130998428089511, "grad_norm": 1.3867692947387695, "learning_rate": 9.517100635322966e-05, "loss": 2.9664, "step": 80750 }, { "epoch": 0.7135413906992353, "grad_norm": 1.0515697002410889, "learning_rate": 9.51650465325349e-05, "loss": 3.2619, "step": 80800 }, { "epoch": 0.7139829385895194, "grad_norm": 1.7852091789245605, "learning_rate": 
9.515908322322329e-05, "loss": 3.0423, "step": 80850 }, { "epoch": 0.7144244864798036, "grad_norm": 1.8944436311721802, "learning_rate": 9.515311642575545e-05, "loss": 3.1392, "step": 80900 }, { "epoch": 0.7148660343700878, "grad_norm": 0.9419056177139282, "learning_rate": 9.514714614059225e-05, "loss": 3.1358, "step": 80950 }, { "epoch": 0.7153075822603719, "grad_norm": 3.2333171367645264, "learning_rate": 9.514117236819485e-05, "loss": 3.8758, "step": 81000 }, { "epoch": 0.7153075822603719, "eval_asr_loss": 0.9185007565241553, "eval_loss": 2.989028215408325, "eval_runtime": 20.4458, "eval_samples_per_second": 37.563, "eval_steps_per_second": 9.391, "eval_tts_loss": 5.958070210757005, "step": 81000 }, { "epoch": 0.7157491301506561, "grad_norm": 2.493988037109375, "learning_rate": 9.513519510902466e-05, "loss": 3.6492, "step": 81050 }, { "epoch": 0.7161906780409403, "grad_norm": 2.0979015827178955, "learning_rate": 9.512921436354339e-05, "loss": 3.7933, "step": 81100 }, { "epoch": 0.7166322259312246, "grad_norm": 2.046807050704956, "learning_rate": 9.512323013221299e-05, "loss": 3.291, "step": 81150 }, { "epoch": 0.7170737738215087, "grad_norm": 2.943680763244629, "learning_rate": 9.51172424154957e-05, "loss": 3.0745, "step": 81200 }, { "epoch": 0.7175153217117929, "grad_norm": 2.4897234439849854, "learning_rate": 9.5111251213854e-05, "loss": 3.1611, "step": 81250 }, { "epoch": 0.717956869602077, "grad_norm": 4.505876541137695, "learning_rate": 9.51052565277507e-05, "loss": 3.5499, "step": 81300 }, { "epoch": 0.7183984174923612, "grad_norm": 1.430411458015442, "learning_rate": 9.50992583576488e-05, "loss": 3.1281, "step": 81350 }, { "epoch": 0.7188399653826454, "grad_norm": 3.1512632369995117, "learning_rate": 9.509325670401164e-05, "loss": 3.4918, "step": 81400 }, { "epoch": 0.7192815132729296, "grad_norm": 1.2546844482421875, "learning_rate": 9.508725156730276e-05, "loss": 3.6289, "step": 81450 }, { "epoch": 0.7197230611632137, "grad_norm": 3.754129409790039, 
"learning_rate": 9.508124294798604e-05, "loss": 2.9913, "step": 81500 }, { "epoch": 0.720164609053498, "grad_norm": 3.811619997024536, "learning_rate": 9.507523084652556e-05, "loss": 3.233, "step": 81550 }, { "epoch": 0.7206061569437822, "grad_norm": 1.469282627105713, "learning_rate": 9.5069335609166e-05, "loss": 3.6196, "step": 81600 }, { "epoch": 0.7210477048340663, "grad_norm": 2.8807554244995117, "learning_rate": 9.50633166144312e-05, "loss": 2.9091, "step": 81650 }, { "epoch": 0.7214892527243505, "grad_norm": 2.3154757022857666, "learning_rate": 9.505729413893734e-05, "loss": 3.2319, "step": 81700 }, { "epoch": 0.7219308006146347, "grad_norm": 1.580367922782898, "learning_rate": 9.505126818314955e-05, "loss": 3.5418, "step": 81750 }, { "epoch": 0.7223723485049188, "grad_norm": 1.0496435165405273, "learning_rate": 9.504523874753335e-05, "loss": 3.6296, "step": 81800 }, { "epoch": 0.722813896395203, "grad_norm": 0.9904226660728455, "learning_rate": 9.50392058325544e-05, "loss": 3.2764, "step": 81850 }, { "epoch": 0.7232554442854872, "grad_norm": 1.5296332836151123, "learning_rate": 9.503316943867876e-05, "loss": 3.6219, "step": 81900 }, { "epoch": 0.7236969921757714, "grad_norm": 1.79270601272583, "learning_rate": 9.502712956637263e-05, "loss": 3.467, "step": 81950 }, { "epoch": 0.7241385400660556, "grad_norm": 3.750113010406494, "learning_rate": 9.502108621610255e-05, "loss": 2.9975, "step": 82000 }, { "epoch": 0.7245800879563398, "grad_norm": 1.8261284828186035, "learning_rate": 9.501503938833534e-05, "loss": 3.2391, "step": 82050 }, { "epoch": 0.7250216358466239, "grad_norm": 1.2408541440963745, "learning_rate": 9.500911012370587e-05, "loss": 3.4596, "step": 82100 }, { "epoch": 0.7254631837369081, "grad_norm": 1.0297032594680786, "learning_rate": 9.50030564118725e-05, "loss": 3.1892, "step": 82150 }, { "epoch": 0.7259047316271923, "grad_norm": 2.6377224922180176, "learning_rate": 9.499699922393466e-05, "loss": 3.624, "step": 82200 }, { "epoch": 
0.7263462795174764, "grad_norm": 3.4818551540374756, "learning_rate": 9.499093856036019e-05, "loss": 3.1586, "step": 82250 }, { "epoch": 0.7267878274077606, "grad_norm": 2.5095982551574707, "learning_rate": 9.498487442161721e-05, "loss": 3.2636, "step": 82300 }, { "epoch": 0.7272293752980449, "grad_norm": 1.323671579360962, "learning_rate": 9.497880680817414e-05, "loss": 3.3924, "step": 82350 }, { "epoch": 0.727670923188329, "grad_norm": 3.4375061988830566, "learning_rate": 9.497273572049967e-05, "loss": 3.2829, "step": 82400 }, { "epoch": 0.7281124710786132, "grad_norm": 1.7869148254394531, "learning_rate": 9.496666115906272e-05, "loss": 3.5715, "step": 82450 }, { "epoch": 0.7285540189688974, "grad_norm": 2.402039051055908, "learning_rate": 9.49605831243325e-05, "loss": 3.3708, "step": 82500 }, { "epoch": 0.7289955668591815, "grad_norm": 2.0941081047058105, "learning_rate": 9.49545016167785e-05, "loss": 3.3561, "step": 82550 }, { "epoch": 0.7294371147494657, "grad_norm": 3.094398021697998, "learning_rate": 9.494841663687043e-05, "loss": 3.5185, "step": 82600 }, { "epoch": 0.7298786626397499, "grad_norm": 1.317842960357666, "learning_rate": 9.494232818507833e-05, "loss": 3.3928, "step": 82650 }, { "epoch": 0.730320210530034, "grad_norm": 2.3808138370513916, "learning_rate": 9.493623626187249e-05, "loss": 3.375, "step": 82700 }, { "epoch": 0.7307617584203183, "grad_norm": 1.0954011678695679, "learning_rate": 9.493014086772345e-05, "loss": 3.2433, "step": 82750 }, { "epoch": 0.7312033063106025, "grad_norm": 2.7866711616516113, "learning_rate": 9.492404200310201e-05, "loss": 2.8652, "step": 82800 }, { "epoch": 0.7316448542008867, "grad_norm": 0.6492549777030945, "learning_rate": 9.491793966847928e-05, "loss": 3.688, "step": 82850 }, { "epoch": 0.7320864020911708, "grad_norm": 2.792008876800537, "learning_rate": 9.491183386432659e-05, "loss": 3.7061, "step": 82900 }, { "epoch": 0.732527949981455, "grad_norm": 3.021859884262085, "learning_rate": 9.49057245911156e-05, 
"loss": 3.5694, "step": 82950 }, { "epoch": 0.7329694978717392, "grad_norm": 2.1141061782836914, "learning_rate": 9.489961184931815e-05, "loss": 3.1073, "step": 83000 }, { "epoch": 0.7334110457620233, "grad_norm": 1.2735497951507568, "learning_rate": 9.489349563940644e-05, "loss": 3.3421, "step": 83050 }, { "epoch": 0.7338525936523075, "grad_norm": 1.5497995615005493, "learning_rate": 9.488737596185286e-05, "loss": 3.6448, "step": 83100 }, { "epoch": 0.7342941415425918, "grad_norm": 2.1992032527923584, "learning_rate": 9.488125281713013e-05, "loss": 3.4424, "step": 83150 }, { "epoch": 0.7347356894328759, "grad_norm": 4.297890663146973, "learning_rate": 9.487512620571121e-05, "loss": 3.191, "step": 83200 }, { "epoch": 0.7351772373231601, "grad_norm": 2.3956549167633057, "learning_rate": 9.486899612806931e-05, "loss": 3.0592, "step": 83250 }, { "epoch": 0.7356187852134443, "grad_norm": 1.5929914712905884, "learning_rate": 9.486286258467793e-05, "loss": 3.214, "step": 83300 }, { "epoch": 0.7360603331037284, "grad_norm": 1.5399370193481445, "learning_rate": 9.485672557601087e-05, "loss": 3.3211, "step": 83350 }, { "epoch": 0.7365018809940126, "grad_norm": 3.4927520751953125, "learning_rate": 9.48505851025421e-05, "loss": 3.287, "step": 83400 }, { "epoch": 0.7369434288842968, "grad_norm": 1.6479640007019043, "learning_rate": 9.484444116474597e-05, "loss": 3.2647, "step": 83450 }, { "epoch": 0.7373849767745809, "grad_norm": 4.172652721405029, "learning_rate": 9.483829376309703e-05, "loss": 3.0049, "step": 83500 }, { "epoch": 0.7378265246648652, "grad_norm": 2.3201801776885986, "learning_rate": 9.48321428980701e-05, "loss": 2.9019, "step": 83550 }, { "epoch": 0.7382680725551494, "grad_norm": 3.2628872394561768, "learning_rate": 9.482598857014031e-05, "loss": 3.2633, "step": 83600 }, { "epoch": 0.7387096204454335, "grad_norm": 1.8258609771728516, "learning_rate": 9.481983077978302e-05, "loss": 3.2017, "step": 83650 }, { "epoch": 0.7391511683357177, "grad_norm": 
2.0017619132995605, "learning_rate": 9.481366952747386e-05, "loss": 3.2454, "step": 83700 }, { "epoch": 0.7395927162260019, "grad_norm": 2.6693084239959717, "learning_rate": 9.480750481368872e-05, "loss": 3.6843, "step": 83750 }, { "epoch": 0.740034264116286, "grad_norm": 1.9272335767745972, "learning_rate": 9.480133663890382e-05, "loss": 3.0776, "step": 83800 }, { "epoch": 0.7404758120065702, "grad_norm": 0.973445475101471, "learning_rate": 9.479516500359555e-05, "loss": 3.5528, "step": 83850 }, { "epoch": 0.7409173598968544, "grad_norm": 1.443723201751709, "learning_rate": 9.478898990824064e-05, "loss": 3.3918, "step": 83900 }, { "epoch": 0.7413589077871386, "grad_norm": 3.7001953125, "learning_rate": 9.478281135331606e-05, "loss": 3.3402, "step": 83950 }, { "epoch": 0.7418004556774228, "grad_norm": 2.7895514965057373, "learning_rate": 9.477662933929905e-05, "loss": 3.2964, "step": 84000 }, { "epoch": 0.7418004556774228, "eval_asr_loss": 0.907389885219024, "eval_loss": 2.9814484119415283, "eval_runtime": 21.1732, "eval_samples_per_second": 36.272, "eval_steps_per_second": 9.068, "eval_tts_loss": 5.974965912665154, "step": 84000 }, { "epoch": 0.742242003567707, "grad_norm": 2.343669891357422, "learning_rate": 9.477044386666711e-05, "loss": 3.0479, "step": 84050 }, { "epoch": 0.7426835514579911, "grad_norm": 2.6981098651885986, "learning_rate": 9.476425493589804e-05, "loss": 3.538, "step": 84100 }, { "epoch": 0.7431250993482753, "grad_norm": 2.0038726329803467, "learning_rate": 9.475806254746984e-05, "loss": 3.3287, "step": 84150 }, { "epoch": 0.7435666472385595, "grad_norm": 2.3365066051483154, "learning_rate": 9.475186670186088e-05, "loss": 3.4029, "step": 84200 }, { "epoch": 0.7440081951288436, "grad_norm": 3.0482475757598877, "learning_rate": 9.474566739954966e-05, "loss": 3.1918, "step": 84250 }, { "epoch": 0.7444497430191278, "grad_norm": 2.1410951614379883, "learning_rate": 9.47394646410151e-05, "loss": 3.1646, "step": 84300 }, { "epoch": 0.7448912909094121, 
"grad_norm": 2.7029786109924316, "learning_rate": 9.473325842673626e-05, "loss": 3.3533, "step": 84350 }, { "epoch": 0.7453328387996963, "grad_norm": 1.4176026582717896, "learning_rate": 9.472704875719254e-05, "loss": 3.5932, "step": 84400 }, { "epoch": 0.7457743866899804, "grad_norm": 3.4000325202941895, "learning_rate": 9.472083563286358e-05, "loss": 2.8913, "step": 84450 }, { "epoch": 0.7462159345802646, "grad_norm": 3.8159403800964355, "learning_rate": 9.47146190542293e-05, "loss": 3.0607, "step": 84500 }, { "epoch": 0.7466574824705488, "grad_norm": 2.903031349182129, "learning_rate": 9.470839902176985e-05, "loss": 3.1426, "step": 84550 }, { "epoch": 0.7470990303608329, "grad_norm": 1.7477384805679321, "learning_rate": 9.470217553596568e-05, "loss": 3.1242, "step": 84600 }, { "epoch": 0.7475405782511171, "grad_norm": 1.4132603406906128, "learning_rate": 9.469594859729753e-05, "loss": 3.1462, "step": 84650 }, { "epoch": 0.7479821261414012, "grad_norm": 1.7881561517715454, "learning_rate": 9.468971820624636e-05, "loss": 3.1489, "step": 84700 }, { "epoch": 0.7484236740316855, "grad_norm": 0.7184849381446838, "learning_rate": 9.468348436329341e-05, "loss": 3.5103, "step": 84750 }, { "epoch": 0.7488652219219697, "grad_norm": 5.096653938293457, "learning_rate": 9.46772470689202e-05, "loss": 3.6296, "step": 84800 }, { "epoch": 0.7493067698122539, "grad_norm": 1.7660068273544312, "learning_rate": 9.467100632360851e-05, "loss": 3.2378, "step": 84850 }, { "epoch": 0.749748317702538, "grad_norm": 3.2901999950408936, "learning_rate": 9.466476212784038e-05, "loss": 2.8817, "step": 84900 }, { "epoch": 0.7501898655928222, "grad_norm": 0.6989944577217102, "learning_rate": 9.465851448209813e-05, "loss": 3.2027, "step": 84950 }, { "epoch": 0.7506314134831064, "grad_norm": 4.7878875732421875, "learning_rate": 9.465226338686431e-05, "loss": 3.0253, "step": 85000 }, { "epoch": 0.7510729613733905, "grad_norm": 2.0710065364837646, "learning_rate": 9.46460088426218e-05, "loss": 
3.5431, "step": 85050 }, { "epoch": 0.7515145092636747, "grad_norm": 1.119075059890747, "learning_rate": 9.463975084985369e-05, "loss": 3.4444, "step": 85100 }, { "epoch": 0.751956057153959, "grad_norm": 2.28783917427063, "learning_rate": 9.463348940904335e-05, "loss": 3.6863, "step": 85150 }, { "epoch": 0.7523976050442431, "grad_norm": 2.2328102588653564, "learning_rate": 9.462722452067445e-05, "loss": 3.2708, "step": 85200 }, { "epoch": 0.7528391529345273, "grad_norm": 1.885800838470459, "learning_rate": 9.462095618523089e-05, "loss": 3.7459, "step": 85250 }, { "epoch": 0.7532807008248115, "grad_norm": 1.719815731048584, "learning_rate": 9.461468440319681e-05, "loss": 3.7436, "step": 85300 }, { "epoch": 0.7537222487150956, "grad_norm": 1.0964494943618774, "learning_rate": 9.460840917505668e-05, "loss": 2.7071, "step": 85350 }, { "epoch": 0.7541637966053798, "grad_norm": 2.1939446926116943, "learning_rate": 9.460213050129525e-05, "loss": 3.4522, "step": 85400 }, { "epoch": 0.754605344495664, "grad_norm": 3.344871759414673, "learning_rate": 9.459584838239743e-05, "loss": 3.3535, "step": 85450 }, { "epoch": 0.7550468923859481, "grad_norm": 3.5036964416503906, "learning_rate": 9.458956281884848e-05, "loss": 3.1035, "step": 85500 }, { "epoch": 0.7554884402762324, "grad_norm": 1.8744654655456543, "learning_rate": 9.458327381113392e-05, "loss": 3.4614, "step": 85550 }, { "epoch": 0.7559299881665166, "grad_norm": 3.193300485610962, "learning_rate": 9.457698135973951e-05, "loss": 3.2385, "step": 85600 }, { "epoch": 0.7563715360568007, "grad_norm": 2.4154229164123535, "learning_rate": 9.457068546515129e-05, "loss": 3.1358, "step": 85650 }, { "epoch": 0.7568130839470849, "grad_norm": 1.8072885274887085, "learning_rate": 9.456438612785556e-05, "loss": 3.0887, "step": 85700 }, { "epoch": 0.7572546318373691, "grad_norm": 4.455030918121338, "learning_rate": 9.455808334833892e-05, "loss": 2.7886, "step": 85750 }, { "epoch": 0.7576961797276532, "grad_norm": 3.4446539878845215, 
"learning_rate": 9.455177712708815e-05, "loss": 3.045, "step": 85800 }, { "epoch": 0.7581377276179374, "grad_norm": 1.5495901107788086, "learning_rate": 9.45454674645904e-05, "loss": 3.3734, "step": 85850 }, { "epoch": 0.7585792755082216, "grad_norm": 1.636919379234314, "learning_rate": 9.453915436133303e-05, "loss": 3.1687, "step": 85900 }, { "epoch": 0.7590208233985059, "grad_norm": 1.9204362630844116, "learning_rate": 9.453283781780365e-05, "loss": 3.5536, "step": 85950 }, { "epoch": 0.75946237128879, "grad_norm": 2.4546802043914795, "learning_rate": 9.45265178344902e-05, "loss": 3.0949, "step": 86000 }, { "epoch": 0.7599039191790742, "grad_norm": 5.441671848297119, "learning_rate": 9.45201944118808e-05, "loss": 3.2127, "step": 86050 }, { "epoch": 0.7603454670693583, "grad_norm": 3.458197593688965, "learning_rate": 9.45138675504639e-05, "loss": 3.4892, "step": 86100 }, { "epoch": 0.7607870149596425, "grad_norm": 1.5939244031906128, "learning_rate": 9.450753725072822e-05, "loss": 3.2989, "step": 86150 }, { "epoch": 0.7612285628499267, "grad_norm": 2.7477095127105713, "learning_rate": 9.450120351316268e-05, "loss": 3.1591, "step": 86200 }, { "epoch": 0.7616701107402108, "grad_norm": 2.2054810523986816, "learning_rate": 9.449486633825654e-05, "loss": 3.2117, "step": 86250 }, { "epoch": 0.762111658630495, "grad_norm": 2.03580379486084, "learning_rate": 9.448852572649926e-05, "loss": 2.9641, "step": 86300 }, { "epoch": 0.7625532065207793, "grad_norm": 1.0966379642486572, "learning_rate": 9.448218167838065e-05, "loss": 3.3211, "step": 86350 }, { "epoch": 0.7629947544110635, "grad_norm": 2.210423707962036, "learning_rate": 9.44758341943907e-05, "loss": 3.2672, "step": 86400 }, { "epoch": 0.7634363023013476, "grad_norm": 3.2034225463867188, "learning_rate": 9.446948327501967e-05, "loss": 3.6083, "step": 86450 }, { "epoch": 0.7638778501916318, "grad_norm": 2.616360664367676, "learning_rate": 9.446312892075818e-05, "loss": 3.1633, "step": 86500 }, { "epoch": 
0.764319398081916, "grad_norm": 2.7256109714508057, "learning_rate": 9.445689832152417e-05, "loss": 3.1589, "step": 86550 }, { "epoch": 0.7647609459722001, "grad_norm": 3.060117483139038, "learning_rate": 9.445053716762777e-05, "loss": 3.142, "step": 86600 }, { "epoch": 0.7652024938624843, "grad_norm": 1.2337734699249268, "learning_rate": 9.444417258030429e-05, "loss": 3.1518, "step": 86650 }, { "epoch": 0.7656440417527685, "grad_norm": 2.3434503078460693, "learning_rate": 9.443780456004537e-05, "loss": 3.2844, "step": 86700 }, { "epoch": 0.7660855896430527, "grad_norm": 2.32765793800354, "learning_rate": 9.443143310734285e-05, "loss": 3.131, "step": 86750 }, { "epoch": 0.7665271375333369, "grad_norm": 1.1229861974716187, "learning_rate": 9.442505822268891e-05, "loss": 3.4084, "step": 86800 }, { "epoch": 0.7669686854236211, "grad_norm": 2.0934159755706787, "learning_rate": 9.441867990657594e-05, "loss": 3.3533, "step": 86850 }, { "epoch": 0.7674102333139052, "grad_norm": 5.029802322387695, "learning_rate": 9.441229815949658e-05, "loss": 3.2617, "step": 86900 }, { "epoch": 0.7678517812041894, "grad_norm": 3.596031665802002, "learning_rate": 9.440591298194382e-05, "loss": 3.3018, "step": 86950 }, { "epoch": 0.7682933290944736, "grad_norm": 0.9849772453308105, "learning_rate": 9.439952437441082e-05, "loss": 2.6819, "step": 87000 }, { "epoch": 0.7682933290944736, "eval_asr_loss": 0.9071832583308085, "eval_loss": 2.9738311767578125, "eval_runtime": 20.831, "eval_samples_per_second": 36.868, "eval_steps_per_second": 9.217, "eval_tts_loss": 5.941716546998216, "step": 87000 }, { "epoch": 0.7687348769847577, "grad_norm": 1.9874849319458008, "learning_rate": 9.439313233739105e-05, "loss": 3.5471, "step": 87050 }, { "epoch": 0.7691764248750419, "grad_norm": 2.417670249938965, "learning_rate": 9.438673687137827e-05, "loss": 3.2482, "step": 87100 }, { "epoch": 0.7696179727653262, "grad_norm": 1.2283480167388916, "learning_rate": 9.438033797686642e-05, "loss": 3.6392, "step": 
87150 }, { "epoch": 0.7700595206556103, "grad_norm": 4.871400833129883, "learning_rate": 9.437393565434984e-05, "loss": 3.7572, "step": 87200 }, { "epoch": 0.7705010685458945, "grad_norm": 2.2227866649627686, "learning_rate": 9.436752990432298e-05, "loss": 3.0988, "step": 87250 }, { "epoch": 0.7709426164361787, "grad_norm": 1.317663550376892, "learning_rate": 9.436112072728067e-05, "loss": 2.342, "step": 87300 }, { "epoch": 0.7713841643264628, "grad_norm": 1.2101949453353882, "learning_rate": 9.435470812371796e-05, "loss": 3.0555, "step": 87350 }, { "epoch": 0.771825712216747, "grad_norm": 2.3560891151428223, "learning_rate": 9.434829209413016e-05, "loss": 3.287, "step": 87400 }, { "epoch": 0.7722672601070312, "grad_norm": 2.26143217086792, "learning_rate": 9.434187263901286e-05, "loss": 3.1895, "step": 87450 }, { "epoch": 0.7727088079973153, "grad_norm": 2.7307794094085693, "learning_rate": 9.43354497588619e-05, "loss": 3.0531, "step": 87500 }, { "epoch": 0.7731503558875996, "grad_norm": 2.08046293258667, "learning_rate": 9.43290234541734e-05, "loss": 3.5845, "step": 87550 }, { "epoch": 0.7735919037778838, "grad_norm": 1.427642822265625, "learning_rate": 9.432259372544375e-05, "loss": 3.372, "step": 87600 }, { "epoch": 0.774033451668168, "grad_norm": 1.8793132305145264, "learning_rate": 9.431616057316957e-05, "loss": 3.44, "step": 87650 }, { "epoch": 0.7744749995584521, "grad_norm": 5.893092632293701, "learning_rate": 9.430972399784779e-05, "loss": 3.3203, "step": 87700 }, { "epoch": 0.7749165474487363, "grad_norm": 1.8131071329116821, "learning_rate": 9.430328399997555e-05, "loss": 3.1405, "step": 87750 }, { "epoch": 0.7753580953390204, "grad_norm": 1.3549493551254272, "learning_rate": 9.429696948198171e-05, "loss": 3.4958, "step": 87800 }, { "epoch": 0.7757996432293046, "grad_norm": 2.8641469478607178, "learning_rate": 9.429052270892739e-05, "loss": 3.0084, "step": 87850 }, { "epoch": 0.7762411911195888, "grad_norm": 1.4089255332946777, "learning_rate": 
9.428407251480576e-05, "loss": 3.5739, "step": 87900 }, { "epoch": 0.7766827390098731, "grad_norm": 2.792038679122925, "learning_rate": 9.427761890011504e-05, "loss": 3.0026, "step": 87950 }, { "epoch": 0.7771242869001572, "grad_norm": 2.8735337257385254, "learning_rate": 9.427116186535371e-05, "loss": 2.9788, "step": 88000 }, { "epoch": 0.7775658347904414, "grad_norm": 2.6922061443328857, "learning_rate": 9.426470141102058e-05, "loss": 3.5371, "step": 88050 }, { "epoch": 0.7780073826807256, "grad_norm": 2.8780505657196045, "learning_rate": 9.425823753761458e-05, "loss": 3.4691, "step": 88100 }, { "epoch": 0.7784489305710097, "grad_norm": 3.15449595451355, "learning_rate": 9.425177024563504e-05, "loss": 3.0253, "step": 88150 }, { "epoch": 0.7788904784612939, "grad_norm": 2.4547994136810303, "learning_rate": 9.42452995355815e-05, "loss": 3.22, "step": 88200 }, { "epoch": 0.779332026351578, "grad_norm": 1.3049603700637817, "learning_rate": 9.423882540795375e-05, "loss": 3.3362, "step": 88250 }, { "epoch": 0.7797735742418622, "grad_norm": 2.944639205932617, "learning_rate": 9.423234786325188e-05, "loss": 2.6225, "step": 88300 }, { "epoch": 0.7802151221321465, "grad_norm": 4.7420783042907715, "learning_rate": 9.42258669019762e-05, "loss": 2.852, "step": 88350 }, { "epoch": 0.7806566700224307, "grad_norm": 2.200601100921631, "learning_rate": 9.421938252462734e-05, "loss": 3.4574, "step": 88400 }, { "epoch": 0.7810982179127148, "grad_norm": 0.8284490704536438, "learning_rate": 9.421289473170615e-05, "loss": 3.3901, "step": 88450 }, { "epoch": 0.781539765802999, "grad_norm": 2.629199266433716, "learning_rate": 9.420640352371375e-05, "loss": 3.0143, "step": 88500 }, { "epoch": 0.7819813136932832, "grad_norm": 2.411001205444336, "learning_rate": 9.419990890115155e-05, "loss": 3.4572, "step": 88550 }, { "epoch": 0.7824228615835673, "grad_norm": 3.345630168914795, "learning_rate": 9.419341086452118e-05, "loss": 2.9808, "step": 88600 }, { "epoch": 0.7828644094738515, 
"grad_norm": 1.7286512851715088, "learning_rate": 9.418690941432458e-05, "loss": 3.3402, "step": 88650 }, { "epoch": 0.7833059573641357, "grad_norm": 2.867637872695923, "learning_rate": 9.418040455106391e-05, "loss": 3.3708, "step": 88700 }, { "epoch": 0.7837475052544199, "grad_norm": 1.4467108249664307, "learning_rate": 9.417389627524163e-05, "loss": 3.4814, "step": 88750 }, { "epoch": 0.7841890531447041, "grad_norm": 1.6191805601119995, "learning_rate": 9.416738458736045e-05, "loss": 3.4584, "step": 88800 }, { "epoch": 0.7846306010349883, "grad_norm": 2.4342501163482666, "learning_rate": 9.416086948792333e-05, "loss": 3.4172, "step": 88850 }, { "epoch": 0.7850721489252724, "grad_norm": 1.3565874099731445, "learning_rate": 9.415435097743352e-05, "loss": 3.6409, "step": 88900 }, { "epoch": 0.7855136968155566, "grad_norm": 0.9413079619407654, "learning_rate": 9.414782905639452e-05, "loss": 3.1165, "step": 88950 }, { "epoch": 0.7859552447058408, "grad_norm": 2.325162887573242, "learning_rate": 9.414130372531009e-05, "loss": 3.3463, "step": 89000 }, { "epoch": 0.7863967925961249, "grad_norm": 2.1379945278167725, "learning_rate": 9.413477498468423e-05, "loss": 3.416, "step": 89050 }, { "epoch": 0.7868383404864091, "grad_norm": 1.78729248046875, "learning_rate": 9.412824283502129e-05, "loss": 3.6121, "step": 89100 }, { "epoch": 0.7872798883766934, "grad_norm": 1.3895008563995361, "learning_rate": 9.412170727682576e-05, "loss": 3.1819, "step": 89150 }, { "epoch": 0.7877214362669775, "grad_norm": 3.693272352218628, "learning_rate": 9.41151683106025e-05, "loss": 3.3451, "step": 89200 }, { "epoch": 0.7881629841572617, "grad_norm": 1.3732582330703735, "learning_rate": 9.410862593685657e-05, "loss": 3.0712, "step": 89250 }, { "epoch": 0.7886045320475459, "grad_norm": 1.9743282794952393, "learning_rate": 9.410208015609333e-05, "loss": 3.7555, "step": 89300 }, { "epoch": 0.78904607993783, "grad_norm": 2.194549083709717, "learning_rate": 9.409553096881835e-05, "loss": 2.9958, 
"step": 89350 }, { "epoch": 0.7894876278281142, "grad_norm": 1.8362387418746948, "learning_rate": 9.408897837553754e-05, "loss": 3.4235, "step": 89400 }, { "epoch": 0.7899291757183984, "grad_norm": 1.8394923210144043, "learning_rate": 9.4082422376757e-05, "loss": 3.3358, "step": 89450 }, { "epoch": 0.7903707236086825, "grad_norm": 2.9508590698242188, "learning_rate": 9.407586297298314e-05, "loss": 3.5187, "step": 89500 }, { "epoch": 0.7908122714989668, "grad_norm": 1.0827395915985107, "learning_rate": 9.406930016472262e-05, "loss": 2.985, "step": 89550 }, { "epoch": 0.791253819389251, "grad_norm": 6.141767978668213, "learning_rate": 9.406273395248236e-05, "loss": 3.3644, "step": 89600 }, { "epoch": 0.7916953672795352, "grad_norm": 2.3642897605895996, "learning_rate": 9.405616433676954e-05, "loss": 2.9457, "step": 89650 }, { "epoch": 0.7921369151698193, "grad_norm": 1.307497501373291, "learning_rate": 9.404959131809163e-05, "loss": 3.5927, "step": 89700 }, { "epoch": 0.7925784630601035, "grad_norm": 0.8792401552200317, "learning_rate": 9.404301489695629e-05, "loss": 3.4736, "step": 89750 }, { "epoch": 0.7930200109503877, "grad_norm": 1.8458104133605957, "learning_rate": 9.403643507387155e-05, "loss": 3.1718, "step": 89800 }, { "epoch": 0.7934615588406718, "grad_norm": 0.40211722254753113, "learning_rate": 9.402985184934561e-05, "loss": 3.1028, "step": 89850 }, { "epoch": 0.793903106730956, "grad_norm": 1.4072984457015991, "learning_rate": 9.402326522388695e-05, "loss": 3.2689, "step": 89900 }, { "epoch": 0.7943446546212403, "grad_norm": 3.634361505508423, "learning_rate": 9.401667519800439e-05, "loss": 3.2542, "step": 89950 }, { "epoch": 0.7947862025115244, "grad_norm": 2.3660759925842285, "learning_rate": 9.401008177220691e-05, "loss": 3.3141, "step": 90000 }, { "epoch": 0.7947862025115244, "eval_asr_loss": 0.913769633646809, "eval_loss": 2.972637891769409, "eval_runtime": 20.6823, "eval_samples_per_second": 37.133, "eval_steps_per_second": 9.283, "eval_tts_loss": 
5.941250072060806, "step": 90000 }, { "epoch": 0.7952277504018086, "grad_norm": 1.8747526407241821, "learning_rate": 9.400348494700382e-05, "loss": 3.2776, "step": 90050 }, { "epoch": 0.7956692982920928, "grad_norm": 2.4819157123565674, "learning_rate": 9.399688472290464e-05, "loss": 3.3672, "step": 90100 }, { "epoch": 0.7961108461823769, "grad_norm": 3.1009058952331543, "learning_rate": 9.399028110041921e-05, "loss": 3.7072, "step": 90150 }, { "epoch": 0.7965523940726611, "grad_norm": 2.1817233562469482, "learning_rate": 9.39836740800576e-05, "loss": 3.1293, "step": 90200 }, { "epoch": 0.7969939419629453, "grad_norm": 2.2436323165893555, "learning_rate": 9.397706366233011e-05, "loss": 3.7612, "step": 90250 }, { "epoch": 0.7974354898532294, "grad_norm": 2.8587071895599365, "learning_rate": 9.397058215732492e-05, "loss": 3.574, "step": 90300 }, { "epoch": 0.7978770377435137, "grad_norm": 1.305193543434143, "learning_rate": 9.396396501431968e-05, "loss": 3.2321, "step": 90350 }, { "epoch": 0.7983185856337979, "grad_norm": 4.059189319610596, "learning_rate": 9.395734447547093e-05, "loss": 3.2663, "step": 90400 }, { "epoch": 0.798760133524082, "grad_norm": 1.4845237731933594, "learning_rate": 9.395072054129008e-05, "loss": 3.2744, "step": 90450 }, { "epoch": 0.7992016814143662, "grad_norm": 3.5800607204437256, "learning_rate": 9.394409321228877e-05, "loss": 3.3557, "step": 90500 }, { "epoch": 0.7996432293046504, "grad_norm": 1.589889645576477, "learning_rate": 9.393746248897888e-05, "loss": 3.1891, "step": 90550 }, { "epoch": 0.8000847771949345, "grad_norm": 2.2087574005126953, "learning_rate": 9.39308283718726e-05, "loss": 3.4591, "step": 90600 }, { "epoch": 0.8005263250852187, "grad_norm": 2.9878342151641846, "learning_rate": 9.392419086148234e-05, "loss": 3.1537, "step": 90650 }, { "epoch": 0.8009678729755029, "grad_norm": 4.756255626678467, "learning_rate": 9.391754995832083e-05, "loss": 3.1161, "step": 90700 }, { "epoch": 0.8014094208657871, "grad_norm": 
3.2718210220336914, "learning_rate": 9.391090566290097e-05, "loss": 3.6481, "step": 90750 }, { "epoch": 0.8018509687560713, "grad_norm": 1.9818165302276611, "learning_rate": 9.3904257975736e-05, "loss": 3.0561, "step": 90800 }, { "epoch": 0.8022925166463555, "grad_norm": 0.7319821715354919, "learning_rate": 9.38976068973394e-05, "loss": 3.6211, "step": 90850 }, { "epoch": 0.8027340645366396, "grad_norm": 1.0141242742538452, "learning_rate": 9.389095242822492e-05, "loss": 3.3727, "step": 90900 }, { "epoch": 0.8031756124269238, "grad_norm": 2.862752676010132, "learning_rate": 9.388429456890654e-05, "loss": 3.2647, "step": 90950 }, { "epoch": 0.803617160317208, "grad_norm": 2.0796523094177246, "learning_rate": 9.387763331989854e-05, "loss": 2.9279, "step": 91000 }, { "epoch": 0.8040587082074921, "grad_norm": 3.659575939178467, "learning_rate": 9.387096868171543e-05, "loss": 3.3732, "step": 91050 }, { "epoch": 0.8045002560977763, "grad_norm": 1.8139934539794922, "learning_rate": 9.3864300654872e-05, "loss": 3.4054, "step": 91100 }, { "epoch": 0.8049418039880606, "grad_norm": 1.9845603704452515, "learning_rate": 9.385762923988332e-05, "loss": 3.2696, "step": 91150 }, { "epoch": 0.8053833518783448, "grad_norm": 1.5647777318954468, "learning_rate": 9.385095443726466e-05, "loss": 2.9517, "step": 91200 }, { "epoch": 0.8058248997686289, "grad_norm": 2.7620863914489746, "learning_rate": 9.384427624753164e-05, "loss": 3.3531, "step": 91250 }, { "epoch": 0.8062664476589131, "grad_norm": 4.5521039962768555, "learning_rate": 9.383759467120006e-05, "loss": 2.9471, "step": 91300 }, { "epoch": 0.8067079955491973, "grad_norm": 6.784761428833008, "learning_rate": 9.3830909708786e-05, "loss": 3.0502, "step": 91350 }, { "epoch": 0.8071495434394814, "grad_norm": 2.283494472503662, "learning_rate": 9.382422136080587e-05, "loss": 3.2579, "step": 91400 }, { "epoch": 0.8075910913297656, "grad_norm": 2.49485182762146, "learning_rate": 9.381752962777626e-05, "loss": 3.2323, "step": 91450 }, { 
"epoch": 0.8080326392200498, "grad_norm": 1.1890743970870972, "learning_rate": 9.381083451021402e-05, "loss": 3.3812, "step": 91500 }, { "epoch": 0.808474187110334, "grad_norm": 1.023622989654541, "learning_rate": 9.380413600863633e-05, "loss": 3.1125, "step": 91550 }, { "epoch": 0.8089157350006182, "grad_norm": 3.7999751567840576, "learning_rate": 9.37974341235606e-05, "loss": 3.0946, "step": 91600 }, { "epoch": 0.8093572828909024, "grad_norm": 2.7334210872650146, "learning_rate": 9.379072885550446e-05, "loss": 3.2808, "step": 91650 }, { "epoch": 0.8097988307811865, "grad_norm": 1.0871787071228027, "learning_rate": 9.378402020498585e-05, "loss": 2.9201, "step": 91700 }, { "epoch": 0.8102403786714707, "grad_norm": 1.837319254875183, "learning_rate": 9.377730817252296e-05, "loss": 3.3392, "step": 91750 }, { "epoch": 0.8106819265617549, "grad_norm": 1.857426643371582, "learning_rate": 9.377059275863423e-05, "loss": 3.4567, "step": 91800 }, { "epoch": 0.811123474452039, "grad_norm": 2.4178388118743896, "learning_rate": 9.37638739638384e-05, "loss": 3.1172, "step": 91850 }, { "epoch": 0.8115650223423232, "grad_norm": 1.7428170442581177, "learning_rate": 9.375715178865438e-05, "loss": 3.2541, "step": 91900 }, { "epoch": 0.8120065702326075, "grad_norm": 2.188565731048584, "learning_rate": 9.375042623360143e-05, "loss": 3.0557, "step": 91950 }, { "epoch": 0.8124481181228916, "grad_norm": 2.7001125812530518, "learning_rate": 9.374369729919907e-05, "loss": 3.4467, "step": 92000 }, { "epoch": 0.8128896660131758, "grad_norm": 0.5670623183250427, "learning_rate": 9.373696498596703e-05, "loss": 3.4051, "step": 92050 }, { "epoch": 0.81333121390346, "grad_norm": 0.8684957027435303, "learning_rate": 9.37302292944253e-05, "loss": 3.4903, "step": 92100 }, { "epoch": 0.8137727617937441, "grad_norm": 1.7560847997665405, "learning_rate": 9.372349022509419e-05, "loss": 2.8986, "step": 92150 }, { "epoch": 0.8142143096840283, "grad_norm": 1.301934838294983, "learning_rate": 
9.371674777849423e-05, "loss": 3.2969, "step": 92200 }, { "epoch": 0.8146558575743125, "grad_norm": 5.415755748748779, "learning_rate": 9.37100019551462e-05, "loss": 3.5175, "step": 92250 }, { "epoch": 0.8150974054645966, "grad_norm": 1.403662085533142, "learning_rate": 9.370325275557119e-05, "loss": 3.3363, "step": 92300 }, { "epoch": 0.8155389533548809, "grad_norm": 0.34755173325538635, "learning_rate": 9.369650018029047e-05, "loss": 2.8427, "step": 92350 }, { "epoch": 0.8159805012451651, "grad_norm": 4.63238000869751, "learning_rate": 9.368974422982567e-05, "loss": 3.401, "step": 92400 }, { "epoch": 0.8164220491354492, "grad_norm": 5.1991286277771, "learning_rate": 9.36829849046986e-05, "loss": 3.0604, "step": 92450 }, { "epoch": 0.8168635970257334, "grad_norm": 4.037189960479736, "learning_rate": 9.367622220543136e-05, "loss": 3.4574, "step": 92500 }, { "epoch": 0.8173051449160176, "grad_norm": 3.1938400268554688, "learning_rate": 9.366945613254631e-05, "loss": 3.4916, "step": 92550 }, { "epoch": 0.8177466928063017, "grad_norm": 1.4730736017227173, "learning_rate": 9.36626866865661e-05, "loss": 3.5001, "step": 92600 }, { "epoch": 0.8181882406965859, "grad_norm": 2.499075174331665, "learning_rate": 9.36559138680136e-05, "loss": 3.3653, "step": 92650 }, { "epoch": 0.8186297885868701, "grad_norm": 3.067751884460449, "learning_rate": 9.364913767741192e-05, "loss": 2.9999, "step": 92700 }, { "epoch": 0.8190713364771544, "grad_norm": 3.1233701705932617, "learning_rate": 9.36423581152845e-05, "loss": 3.377, "step": 92750 }, { "epoch": 0.8195128843674385, "grad_norm": 1.1378720998764038, "learning_rate": 9.363557518215498e-05, "loss": 2.8108, "step": 92800 }, { "epoch": 0.8199544322577227, "grad_norm": 1.5843881368637085, "learning_rate": 9.362878887854732e-05, "loss": 3.4633, "step": 92850 }, { "epoch": 0.8203959801480069, "grad_norm": 3.084805965423584, "learning_rate": 9.362199920498566e-05, "loss": 3.3343, "step": 92900 }, { "epoch": 0.820837528038291, "grad_norm": 
1.3744654655456543, "learning_rate": 9.361520616199446e-05, "loss": 2.9136, "step": 92950 }, { "epoch": 0.8212790759285752, "grad_norm": 3.1736974716186523, "learning_rate": 9.360840975009845e-05, "loss": 3.1212, "step": 93000 }, { "epoch": 0.8212790759285752, "eval_asr_loss": 0.9125425967281584, "eval_loss": 2.9801833629608154, "eval_runtime": 20.5623, "eval_samples_per_second": 37.35, "eval_steps_per_second": 9.337, "eval_tts_loss": 5.957149096143686, "step": 93000 }, { "epoch": 0.8217206238188594, "grad_norm": 2.6484904289245605, "learning_rate": 9.360160996982256e-05, "loss": 3.3256, "step": 93050 }, { "epoch": 0.8221621717091435, "grad_norm": 1.6820108890533447, "learning_rate": 9.359480682169204e-05, "loss": 3.3486, "step": 93100 }, { "epoch": 0.8226037195994278, "grad_norm": 1.407516360282898, "learning_rate": 9.358800030623236e-05, "loss": 3.4169, "step": 93150 }, { "epoch": 0.823045267489712, "grad_norm": 1.139156699180603, "learning_rate": 9.358119042396929e-05, "loss": 2.8844, "step": 93200 }, { "epoch": 0.8234868153799961, "grad_norm": 1.5392656326293945, "learning_rate": 9.35743771754288e-05, "loss": 3.5062, "step": 93250 }, { "epoch": 0.8239283632702803, "grad_norm": 1.2801883220672607, "learning_rate": 9.356756056113717e-05, "loss": 2.9979, "step": 93300 }, { "epoch": 0.8243699111605645, "grad_norm": 2.373271942138672, "learning_rate": 9.356074058162094e-05, "loss": 2.8897, "step": 93350 }, { "epoch": 0.8248114590508486, "grad_norm": 0.7981138229370117, "learning_rate": 9.355391723740687e-05, "loss": 3.7609, "step": 93400 }, { "epoch": 0.8252530069411328, "grad_norm": 1.8183437585830688, "learning_rate": 9.354709052902204e-05, "loss": 3.2145, "step": 93450 }, { "epoch": 0.825694554831417, "grad_norm": 1.9743393659591675, "learning_rate": 9.354026045699372e-05, "loss": 3.5426, "step": 93500 }, { "epoch": 0.8261361027217012, "grad_norm": 2.351931571960449, "learning_rate": 9.35334270218495e-05, "loss": 3.347, "step": 93550 }, { "epoch": 
0.8265776506119854, "grad_norm": 1.7274210453033447, "learning_rate": 9.352659022411718e-05, "loss": 2.9723, "step": 93600 }, { "epoch": 0.8270191985022696, "grad_norm": 3.6531035900115967, "learning_rate": 9.351975006432487e-05, "loss": 3.311, "step": 93650 }, { "epoch": 0.8274607463925537, "grad_norm": 5.743092060089111, "learning_rate": 9.351290654300091e-05, "loss": 3.1698, "step": 93700 }, { "epoch": 0.8279022942828379, "grad_norm": 1.3943450450897217, "learning_rate": 9.35060596606739e-05, "loss": 3.0665, "step": 93750 }, { "epoch": 0.8283438421731221, "grad_norm": 3.4705088138580322, "learning_rate": 9.349920941787269e-05, "loss": 3.5611, "step": 93800 }, { "epoch": 0.8287853900634062, "grad_norm": 1.309612512588501, "learning_rate": 9.34923558151264e-05, "loss": 3.4221, "step": 93850 }, { "epoch": 0.8292269379536905, "grad_norm": 1.8064721822738647, "learning_rate": 9.348549885296446e-05, "loss": 3.564, "step": 93900 }, { "epoch": 0.8296684858439747, "grad_norm": 1.7501392364501953, "learning_rate": 9.347863853191646e-05, "loss": 3.0225, "step": 93950 }, { "epoch": 0.8301100337342588, "grad_norm": 3.4473793506622314, "learning_rate": 9.347177485251233e-05, "loss": 3.0757, "step": 94000 }, { "epoch": 0.830551581624543, "grad_norm": 1.1141574382781982, "learning_rate": 9.346490781528221e-05, "loss": 3.0982, "step": 94050 }, { "epoch": 0.8309931295148272, "grad_norm": 3.239313840866089, "learning_rate": 9.345803742075656e-05, "loss": 3.463, "step": 94100 }, { "epoch": 0.8314346774051113, "grad_norm": 2.90984845161438, "learning_rate": 9.345116366946601e-05, "loss": 3.131, "step": 94150 }, { "epoch": 0.8318762252953955, "grad_norm": 3.623865842819214, "learning_rate": 9.344428656194153e-05, "loss": 3.4718, "step": 94200 }, { "epoch": 0.8323177731856797, "grad_norm": 0.8482750058174133, "learning_rate": 9.343740609871431e-05, "loss": 3.3195, "step": 94250 }, { "epoch": 0.832759321075964, "grad_norm": 1.9041322469711304, "learning_rate": 9.343052228031582e-05, 
"loss": 3.3709, "step": 94300 }, { "epoch": 0.8332008689662481, "grad_norm": 3.6412575244903564, "learning_rate": 9.342363510727773e-05, "loss": 3.3433, "step": 94350 }, { "epoch": 0.8336424168565323, "grad_norm": 1.070917010307312, "learning_rate": 9.341674458013208e-05, "loss": 3.5517, "step": 94400 }, { "epoch": 0.8340839647468165, "grad_norm": 4.740517616271973, "learning_rate": 9.340985069941105e-05, "loss": 3.3758, "step": 94450 }, { "epoch": 0.8345255126371006, "grad_norm": 0.6648314595222473, "learning_rate": 9.340295346564719e-05, "loss": 3.5991, "step": 94500 }, { "epoch": 0.8349670605273848, "grad_norm": 3.5965664386749268, "learning_rate": 9.339605287937319e-05, "loss": 3.4054, "step": 94550 }, { "epoch": 0.835408608417669, "grad_norm": 2.838550329208374, "learning_rate": 9.33891489411221e-05, "loss": 3.0964, "step": 94600 }, { "epoch": 0.8358501563079531, "grad_norm": 1.3823540210723877, "learning_rate": 9.338224165142719e-05, "loss": 3.5835, "step": 94650 }, { "epoch": 0.8362917041982374, "grad_norm": 2.0661122798919678, "learning_rate": 9.337533101082199e-05, "loss": 3.2928, "step": 94700 }, { "epoch": 0.8367332520885216, "grad_norm": 1.8618370294570923, "learning_rate": 9.336841701984027e-05, "loss": 3.0778, "step": 94750 }, { "epoch": 0.8371747999788057, "grad_norm": 4.694985389709473, "learning_rate": 9.336149967901609e-05, "loss": 3.0738, "step": 94800 }, { "epoch": 0.8376163478690899, "grad_norm": 2.5283780097961426, "learning_rate": 9.335457898888376e-05, "loss": 3.4458, "step": 94850 }, { "epoch": 0.8380578957593741, "grad_norm": 1.0920939445495605, "learning_rate": 9.334765494997784e-05, "loss": 3.2661, "step": 94900 }, { "epoch": 0.8384994436496582, "grad_norm": 2.00156307220459, "learning_rate": 9.334072756283314e-05, "loss": 3.2389, "step": 94950 }, { "epoch": 0.8389409915399424, "grad_norm": 2.5333197116851807, "learning_rate": 9.333379682798475e-05, "loss": 3.5175, "step": 95000 }, { "epoch": 0.8393825394302266, "grad_norm": 
2.970446825027466, "learning_rate": 9.332686274596803e-05, "loss": 3.0734, "step": 95050 }, { "epoch": 0.8398240873205108, "grad_norm": 1.0278738737106323, "learning_rate": 9.331992531731857e-05, "loss": 3.5306, "step": 95100 }, { "epoch": 0.840265635210795, "grad_norm": 2.5087051391601562, "learning_rate": 9.331298454257221e-05, "loss": 3.3883, "step": 95150 }, { "epoch": 0.8407071831010792, "grad_norm": 1.317252278327942, "learning_rate": 9.330604042226507e-05, "loss": 3.2772, "step": 95200 }, { "epoch": 0.8411487309913633, "grad_norm": 2.6759376525878906, "learning_rate": 9.329909295693355e-05, "loss": 2.9157, "step": 95250 }, { "epoch": 0.8415902788816475, "grad_norm": 1.9456520080566406, "learning_rate": 9.329214214711426e-05, "loss": 3.2917, "step": 95300 }, { "epoch": 0.8420318267719317, "grad_norm": 2.113647937774658, "learning_rate": 9.328532710918674e-05, "loss": 3.3203, "step": 95350 }, { "epoch": 0.8424733746622158, "grad_norm": 1.5438154935836792, "learning_rate": 9.327836967886584e-05, "loss": 3.6364, "step": 95400 }, { "epoch": 0.8429149225525, "grad_norm": 1.4090991020202637, "learning_rate": 9.32714089056579e-05, "loss": 3.2839, "step": 95450 }, { "epoch": 0.8433564704427843, "grad_norm": 3.7097344398498535, "learning_rate": 9.326444479010055e-05, "loss": 3.1992, "step": 95500 }, { "epoch": 0.8437980183330684, "grad_norm": 1.130540370941162, "learning_rate": 9.32574773327317e-05, "loss": 3.123, "step": 95550 }, { "epoch": 0.8442395662233526, "grad_norm": 3.1675918102264404, "learning_rate": 9.325050653408954e-05, "loss": 3.4446, "step": 95600 }, { "epoch": 0.8446811141136368, "grad_norm": 7.838589191436768, "learning_rate": 9.32435323947125e-05, "loss": 3.1731, "step": 95650 }, { "epoch": 0.8451226620039209, "grad_norm": 2.1061744689941406, "learning_rate": 9.32365549151393e-05, "loss": 3.1445, "step": 95700 }, { "epoch": 0.8455642098942051, "grad_norm": 1.585892677307129, "learning_rate": 9.322957409590884e-05, "loss": 3.248, "step": 95750 }, { 
"epoch": 0.8460057577844893, "grad_norm": 3.2491941452026367, "learning_rate": 9.322258993756037e-05, "loss": 3.1858, "step": 95800 }, { "epoch": 0.8464473056747734, "grad_norm": 4.585638999938965, "learning_rate": 9.321560244063334e-05, "loss": 3.3065, "step": 95850 }, { "epoch": 0.8468888535650577, "grad_norm": 1.343159556388855, "learning_rate": 9.320861160566748e-05, "loss": 3.3079, "step": 95900 }, { "epoch": 0.8473304014553419, "grad_norm": 4.3721699714660645, "learning_rate": 9.320161743320277e-05, "loss": 3.1931, "step": 95950 }, { "epoch": 0.847771949345626, "grad_norm": 1.243537425994873, "learning_rate": 9.319461992377945e-05, "loss": 3.2813, "step": 96000 }, { "epoch": 0.847771949345626, "eval_asr_loss": 0.9113168214024796, "eval_loss": 2.9808547496795654, "eval_runtime": 20.2448, "eval_samples_per_second": 37.936, "eval_steps_per_second": 9.484, "eval_tts_loss": 5.98077398558446, "step": 96000 }, { "epoch": 0.8482134972359102, "grad_norm": 3.9162681102752686, "learning_rate": 9.318761907793804e-05, "loss": 3.2729, "step": 96050 }, { "epoch": 0.8486550451261944, "grad_norm": 0.9850695729255676, "learning_rate": 9.318061489621925e-05, "loss": 3.4972, "step": 96100 }, { "epoch": 0.8490965930164786, "grad_norm": 2.3384222984313965, "learning_rate": 9.317360737916415e-05, "loss": 3.5748, "step": 96150 }, { "epoch": 0.8495381409067627, "grad_norm": 1.6895709037780762, "learning_rate": 9.316659652731395e-05, "loss": 3.5112, "step": 96200 }, { "epoch": 0.8499796887970469, "grad_norm": 4.779604434967041, "learning_rate": 9.315958234121023e-05, "loss": 3.4245, "step": 96250 }, { "epoch": 0.8504212366873312, "grad_norm": 3.7872555255889893, "learning_rate": 9.315256482139475e-05, "loss": 3.4721, "step": 96300 }, { "epoch": 0.8508627845776153, "grad_norm": 1.2569001913070679, "learning_rate": 9.314554396840956e-05, "loss": 3.2695, "step": 96350 }, { "epoch": 0.8513043324678995, "grad_norm": 2.095500946044922, "learning_rate": 9.313851978279697e-05, "loss": 3.1881, 
"step": 96400 }, { "epoch": 0.8517458803581837, "grad_norm": 0.9828936457633972, "learning_rate": 9.313149226509952e-05, "loss": 3.0915, "step": 96450 }, { "epoch": 0.8521874282484678, "grad_norm": 2.886770725250244, "learning_rate": 9.312446141586004e-05, "loss": 3.4099, "step": 96500 }, { "epoch": 0.852628976138752, "grad_norm": 4.082062721252441, "learning_rate": 9.31174272356216e-05, "loss": 3.0428, "step": 96550 }, { "epoch": 0.8530705240290362, "grad_norm": 1.261383295059204, "learning_rate": 9.311038972492754e-05, "loss": 3.2126, "step": 96600 }, { "epoch": 0.8535120719193203, "grad_norm": 1.2416714429855347, "learning_rate": 9.310334888432142e-05, "loss": 3.2021, "step": 96650 }, { "epoch": 0.8539536198096046, "grad_norm": 3.1885976791381836, "learning_rate": 9.309630471434712e-05, "loss": 2.7708, "step": 96700 }, { "epoch": 0.8543951676998888, "grad_norm": 0.710462749004364, "learning_rate": 9.308925721554874e-05, "loss": 3.199, "step": 96750 }, { "epoch": 0.8548367155901729, "grad_norm": 5.235406875610352, "learning_rate": 9.308220638847059e-05, "loss": 3.2963, "step": 96800 }, { "epoch": 0.8552782634804571, "grad_norm": 1.9540880918502808, "learning_rate": 9.307515223365735e-05, "loss": 3.3895, "step": 96850 }, { "epoch": 0.8557198113707413, "grad_norm": 2.796398162841797, "learning_rate": 9.306809475165385e-05, "loss": 3.4795, "step": 96900 }, { "epoch": 0.8561613592610254, "grad_norm": 1.8404650688171387, "learning_rate": 9.306103394300525e-05, "loss": 3.2471, "step": 96950 }, { "epoch": 0.8566029071513096, "grad_norm": 1.5861579179763794, "learning_rate": 9.305396980825693e-05, "loss": 2.8261, "step": 97000 }, { "epoch": 0.8570444550415938, "grad_norm": 5.508571624755859, "learning_rate": 9.304690234795453e-05, "loss": 3.0444, "step": 97050 }, { "epoch": 0.857486002931878, "grad_norm": 3.7076914310455322, "learning_rate": 9.303983156264393e-05, "loss": 3.3217, "step": 97100 }, { "epoch": 0.8579275508221622, "grad_norm": 4.032550811767578, 
"learning_rate": 9.303275745287133e-05, "loss": 3.3205, "step": 97150 }, { "epoch": 0.8583690987124464, "grad_norm": 0.970733106136322, "learning_rate": 9.30256800191831e-05, "loss": 3.1487, "step": 97200 }, { "epoch": 0.8588106466027305, "grad_norm": 2.3852109909057617, "learning_rate": 9.301859926212595e-05, "loss": 3.1561, "step": 97250 }, { "epoch": 0.8592521944930147, "grad_norm": 1.1150777339935303, "learning_rate": 9.301151518224679e-05, "loss": 2.9001, "step": 97300 }, { "epoch": 0.8596937423832989, "grad_norm": 3.74100923538208, "learning_rate": 9.300442778009282e-05, "loss": 3.2136, "step": 97350 }, { "epoch": 0.860135290273583, "grad_norm": 2.762349843978882, "learning_rate": 9.299733705621145e-05, "loss": 3.4975, "step": 97400 }, { "epoch": 0.8605768381638672, "grad_norm": 0.8698297142982483, "learning_rate": 9.299024301115043e-05, "loss": 3.3572, "step": 97450 }, { "epoch": 0.8610183860541515, "grad_norm": 3.2828052043914795, "learning_rate": 9.298314564545765e-05, "loss": 3.3767, "step": 97500 }, { "epoch": 0.8614599339444357, "grad_norm": 2.659813642501831, "learning_rate": 9.297604495968138e-05, "loss": 3.5504, "step": 97550 }, { "epoch": 0.8619014818347198, "grad_norm": 1.7123510837554932, "learning_rate": 9.296894095437005e-05, "loss": 3.4038, "step": 97600 }, { "epoch": 0.862343029725004, "grad_norm": 2.9085240364074707, "learning_rate": 9.296183363007241e-05, "loss": 2.8643, "step": 97650 }, { "epoch": 0.8627845776152882, "grad_norm": 1.4961036443710327, "learning_rate": 9.295472298733742e-05, "loss": 3.1688, "step": 97700 }, { "epoch": 0.8632261255055723, "grad_norm": 2.028132677078247, "learning_rate": 9.294760902671432e-05, "loss": 3.3077, "step": 97750 }, { "epoch": 0.8636676733958565, "grad_norm": 1.637831449508667, "learning_rate": 9.294049174875261e-05, "loss": 2.9796, "step": 97800 }, { "epoch": 0.8641092212861406, "grad_norm": 1.5504677295684814, "learning_rate": 9.293337115400205e-05, "loss": 2.8288, "step": 97850 }, { "epoch": 
0.8645507691764249, "grad_norm": 3.117736577987671, "learning_rate": 9.292638975372799e-05, "loss": 2.9301, "step": 97900 }, { "epoch": 0.8649923170667091, "grad_norm": 2.2668349742889404, "learning_rate": 9.291926259335835e-05, "loss": 3.8076, "step": 97950 }, { "epoch": 0.8654338649569933, "grad_norm": 3.2014098167419434, "learning_rate": 9.29121321178396e-05, "loss": 3.1708, "step": 98000 }, { "epoch": 0.8658754128472774, "grad_norm": 3.2920122146606445, "learning_rate": 9.290499832772255e-05, "loss": 3.3037, "step": 98050 }, { "epoch": 0.8663169607375616, "grad_norm": 1.8121235370635986, "learning_rate": 9.28978612235582e-05, "loss": 3.2196, "step": 98100 }, { "epoch": 0.8667585086278458, "grad_norm": 1.6291028261184692, "learning_rate": 9.289072080589783e-05, "loss": 3.2285, "step": 98150 }, { "epoch": 0.8672000565181299, "grad_norm": 1.6624693870544434, "learning_rate": 9.288357707529297e-05, "loss": 3.1948, "step": 98200 }, { "epoch": 0.8676416044084141, "grad_norm": 2.4642486572265625, "learning_rate": 9.287643003229543e-05, "loss": 3.0989, "step": 98250 }, { "epoch": 0.8680831522986984, "grad_norm": 2.3431122303009033, "learning_rate": 9.286927967745725e-05, "loss": 3.0297, "step": 98300 }, { "epoch": 0.8685247001889825, "grad_norm": 1.7271332740783691, "learning_rate": 9.286212601133076e-05, "loss": 3.7604, "step": 98350 }, { "epoch": 0.8689662480792667, "grad_norm": 2.332041025161743, "learning_rate": 9.285496903446846e-05, "loss": 3.4561, "step": 98400 }, { "epoch": 0.8694077959695509, "grad_norm": 1.6665946245193481, "learning_rate": 9.284780874742321e-05, "loss": 3.2693, "step": 98450 }, { "epoch": 0.869849343859835, "grad_norm": 1.6122417449951172, "learning_rate": 9.284064515074809e-05, "loss": 3.2516, "step": 98500 }, { "epoch": 0.8702908917501192, "grad_norm": 0.9736064672470093, "learning_rate": 9.283347824499639e-05, "loss": 3.3643, "step": 98550 }, { "epoch": 0.8707324396404034, "grad_norm": 2.6523666381835938, "learning_rate": 
9.282630803072173e-05, "loss": 3.4379, "step": 98600 }, { "epoch": 0.8711739875306875, "grad_norm": 3.84800386428833, "learning_rate": 9.281913450847792e-05, "loss": 3.6332, "step": 98650 }, { "epoch": 0.8716155354209718, "grad_norm": 1.0476601123809814, "learning_rate": 9.281195767881908e-05, "loss": 3.1238, "step": 98700 }, { "epoch": 0.872057083311256, "grad_norm": 2.119279623031616, "learning_rate": 9.280477754229952e-05, "loss": 3.4006, "step": 98750 }, { "epoch": 0.8724986312015401, "grad_norm": 1.7416213750839233, "learning_rate": 9.279759409947388e-05, "loss": 3.5637, "step": 98800 }, { "epoch": 0.8729401790918243, "grad_norm": 4.38261079788208, "learning_rate": 9.279040735089702e-05, "loss": 3.233, "step": 98850 }, { "epoch": 0.8733817269821085, "grad_norm": 4.000781059265137, "learning_rate": 9.278321729712403e-05, "loss": 3.5001, "step": 98900 }, { "epoch": 0.8738232748723926, "grad_norm": 2.074897050857544, "learning_rate": 9.27760239387103e-05, "loss": 3.0254, "step": 98950 }, { "epoch": 0.8742648227626768, "grad_norm": 3.6822054386138916, "learning_rate": 9.276882727621146e-05, "loss": 3.2075, "step": 99000 }, { "epoch": 0.8742648227626768, "eval_asr_loss": 0.9122765446783635, "eval_loss": 2.9597482681274414, "eval_runtime": 21.0264, "eval_samples_per_second": 36.525, "eval_steps_per_second": 9.131, "eval_tts_loss": 5.931117558903506, "step": 99000 }, { "epoch": 0.874706370652961, "grad_norm": 2.583610773086548, "learning_rate": 9.276162731018336e-05, "loss": 2.9525, "step": 99050 }, { "epoch": 0.8751479185432453, "grad_norm": 3.8291828632354736, "learning_rate": 9.275442404118217e-05, "loss": 3.5362, "step": 99100 }, { "epoch": 0.8755894664335294, "grad_norm": 1.9839342832565308, "learning_rate": 9.274721746976427e-05, "loss": 3.168, "step": 99150 }, { "epoch": 0.8760310143238136, "grad_norm": 2.494183301925659, "learning_rate": 9.274000759648632e-05, "loss": 3.0813, "step": 99200 }, { "epoch": 0.8764725622140978, "grad_norm": 1.9744882583618164, 
"learning_rate": 9.273279442190519e-05, "loss": 3.3301, "step": 99250 }, { "epoch": 0.8769141101043819, "grad_norm": 1.0826927423477173, "learning_rate": 9.272557794657805e-05, "loss": 3.2976, "step": 99300 }, { "epoch": 0.8773556579946661, "grad_norm": 3.1494545936584473, "learning_rate": 9.271835817106234e-05, "loss": 3.2733, "step": 99350 }, { "epoch": 0.8777972058849502, "grad_norm": 2.1785781383514404, "learning_rate": 9.271113509591568e-05, "loss": 3.1884, "step": 99400 }, { "epoch": 0.8782387537752344, "grad_norm": 1.9323933124542236, "learning_rate": 9.270390872169602e-05, "loss": 3.4076, "step": 99450 }, { "epoch": 0.8786803016655187, "grad_norm": 2.127812385559082, "learning_rate": 9.269667904896153e-05, "loss": 3.3259, "step": 99500 }, { "epoch": 0.8791218495558029, "grad_norm": 2.901846170425415, "learning_rate": 9.268944607827064e-05, "loss": 3.878, "step": 99550 }, { "epoch": 0.879563397446087, "grad_norm": 2.7331161499023438, "learning_rate": 9.268220981018204e-05, "loss": 3.0213, "step": 99600 }, { "epoch": 0.8800049453363712, "grad_norm": 1.2581075429916382, "learning_rate": 9.267497024525466e-05, "loss": 3.0469, "step": 99650 }, { "epoch": 0.8804464932266554, "grad_norm": 1.963077187538147, "learning_rate": 9.266772738404771e-05, "loss": 3.0544, "step": 99700 }, { "epoch": 0.8808880411169395, "grad_norm": 2.728001594543457, "learning_rate": 9.266048122712064e-05, "loss": 3.0793, "step": 99750 }, { "epoch": 0.8813295890072237, "grad_norm": 2.3829917907714844, "learning_rate": 9.265323177503312e-05, "loss": 3.2839, "step": 99800 }, { "epoch": 0.8817711368975079, "grad_norm": 0.8417502045631409, "learning_rate": 9.264597902834515e-05, "loss": 3.1644, "step": 99850 }, { "epoch": 0.8822126847877921, "grad_norm": 3.0022170543670654, "learning_rate": 9.263872298761693e-05, "loss": 3.4081, "step": 99900 }, { "epoch": 0.8826542326780763, "grad_norm": 5.986268043518066, "learning_rate": 9.263146365340891e-05, "loss": 3.1996, "step": 99950 }, { "epoch": 
0.8830957805683605, "grad_norm": 2.8962159156799316, "learning_rate": 9.262420102628184e-05, "loss": 3.1852, "step": 100000 }, { "epoch": 0.8835373284586446, "grad_norm": 2.588916540145874, "learning_rate": 9.261693510679668e-05, "loss": 3.1378, "step": 100050 }, { "epoch": 0.8839788763489288, "grad_norm": 2.7760961055755615, "learning_rate": 9.260966589551468e-05, "loss": 3.2112, "step": 100100 }, { "epoch": 0.884420424239213, "grad_norm": 1.5340514183044434, "learning_rate": 9.260239339299727e-05, "loss": 3.5164, "step": 100150 }, { "epoch": 0.8848619721294971, "grad_norm": 9.54226303100586, "learning_rate": 9.259511759980625e-05, "loss": 3.5849, "step": 100200 }, { "epoch": 0.8853035200197813, "grad_norm": 1.6671772003173828, "learning_rate": 9.258783851650361e-05, "loss": 3.4115, "step": 100250 }, { "epoch": 0.8857450679100656, "grad_norm": 1.8623788356781006, "learning_rate": 9.258055614365156e-05, "loss": 3.3499, "step": 100300 }, { "epoch": 0.8861866158003497, "grad_norm": 1.5677406787872314, "learning_rate": 9.257327048181262e-05, "loss": 3.2248, "step": 100350 }, { "epoch": 0.8866281636906339, "grad_norm": 1.939414381980896, "learning_rate": 9.256598153154954e-05, "loss": 3.419, "step": 100400 }, { "epoch": 0.8870697115809181, "grad_norm": 2.109490394592285, "learning_rate": 9.255883517040522e-05, "loss": 3.5276, "step": 100450 }, { "epoch": 0.8875112594712022, "grad_norm": 3.756086826324463, "learning_rate": 9.255153971072361e-05, "loss": 3.6449, "step": 100500 }, { "epoch": 0.8879528073614864, "grad_norm": 3.2874066829681396, "learning_rate": 9.254424096429637e-05, "loss": 3.452, "step": 100550 }, { "epoch": 0.8883943552517706, "grad_norm": 1.6661789417266846, "learning_rate": 9.253693893168729e-05, "loss": 3.2744, "step": 100600 }, { "epoch": 0.8888359031420547, "grad_norm": 3.6185531616210938, "learning_rate": 9.252963361346036e-05, "loss": 2.9505, "step": 100650 }, { "epoch": 0.889277451032339, "grad_norm": 5.086017608642578, "learning_rate": 
9.25223250101799e-05, "loss": 2.9579, "step": 100700 }, { "epoch": 0.8897189989226232, "grad_norm": 3.7332069873809814, "learning_rate": 9.25150131224104e-05, "loss": 3.1295, "step": 100750 }, { "epoch": 0.8901605468129073, "grad_norm": 3.6353108882904053, "learning_rate": 9.250769795071666e-05, "loss": 3.6284, "step": 100800 }, { "epoch": 0.8906020947031915, "grad_norm": 6.214040279388428, "learning_rate": 9.250037949566368e-05, "loss": 2.6618, "step": 100850 }, { "epoch": 0.8910436425934757, "grad_norm": 3.569542169570923, "learning_rate": 9.249305775781681e-05, "loss": 3.5992, "step": 100900 }, { "epoch": 0.8914851904837598, "grad_norm": 2.4265153408050537, "learning_rate": 9.248573273774154e-05, "loss": 3.8598, "step": 100950 }, { "epoch": 0.891926738374044, "grad_norm": 2.2118725776672363, "learning_rate": 9.247840443600368e-05, "loss": 2.7226, "step": 101000 }, { "epoch": 0.8923682862643282, "grad_norm": 1.527198076248169, "learning_rate": 9.24710728531693e-05, "loss": 3.3547, "step": 101050 }, { "epoch": 0.8928098341546125, "grad_norm": 2.138624668121338, "learning_rate": 9.246373798980468e-05, "loss": 3.5805, "step": 101100 }, { "epoch": 0.8932513820448966, "grad_norm": 3.642125129699707, "learning_rate": 9.245639984647639e-05, "loss": 3.2457, "step": 101150 }, { "epoch": 0.8936929299351808, "grad_norm": 1.057465672492981, "learning_rate": 9.244905842375122e-05, "loss": 3.2628, "step": 101200 }, { "epoch": 0.894134477825465, "grad_norm": 1.685719609260559, "learning_rate": 9.244171372219626e-05, "loss": 3.8842, "step": 101250 }, { "epoch": 0.8945760257157491, "grad_norm": 1.3749964237213135, "learning_rate": 9.243436574237878e-05, "loss": 3.0899, "step": 101300 }, { "epoch": 0.8950175736060333, "grad_norm": 2.584540367126465, "learning_rate": 9.24270144848664e-05, "loss": 3.6703, "step": 101350 }, { "epoch": 0.8954591214963175, "grad_norm": 3.092801570892334, "learning_rate": 9.241965995022692e-05, "loss": 3.1755, "step": 101400 }, { "epoch": 
0.8959006693866016, "grad_norm": 1.243997573852539, "learning_rate": 9.241230213902842e-05, "loss": 2.8785, "step": 101450 }, { "epoch": 0.8963422172768859, "grad_norm": 4.314537048339844, "learning_rate": 9.240494105183921e-05, "loss": 3.3101, "step": 101500 }, { "epoch": 0.8967837651671701, "grad_norm": 2.9184675216674805, "learning_rate": 9.239757668922791e-05, "loss": 3.318, "step": 101550 }, { "epoch": 0.8972253130574542, "grad_norm": 2.036470890045166, "learning_rate": 9.239020905176331e-05, "loss": 3.1143, "step": 101600 }, { "epoch": 0.8976668609477384, "grad_norm": 4.395949363708496, "learning_rate": 9.238283814001454e-05, "loss": 3.2825, "step": 101650 }, { "epoch": 0.8981084088380226, "grad_norm": 2.0808677673339844, "learning_rate": 9.23754639545509e-05, "loss": 3.7597, "step": 101700 }, { "epoch": 0.8985499567283067, "grad_norm": 1.0310138463974, "learning_rate": 9.236808649594202e-05, "loss": 3.3764, "step": 101750 }, { "epoch": 0.8989915046185909, "grad_norm": 1.490734338760376, "learning_rate": 9.236070576475773e-05, "loss": 3.3401, "step": 101800 }, { "epoch": 0.8994330525088751, "grad_norm": 1.4883259534835815, "learning_rate": 9.235332176156812e-05, "loss": 3.3028, "step": 101850 }, { "epoch": 0.8998746003991593, "grad_norm": 1.0461546182632446, "learning_rate": 9.234593448694354e-05, "loss": 3.007, "step": 101900 }, { "epoch": 0.9003161482894435, "grad_norm": 1.0872769355773926, "learning_rate": 9.233854394145462e-05, "loss": 3.1332, "step": 101950 }, { "epoch": 0.9007576961797277, "grad_norm": 4.982665061950684, "learning_rate": 9.23311501256722e-05, "loss": 3.3656, "step": 102000 }, { "epoch": 0.9007576961797277, "eval_asr_loss": 0.903455578837743, "eval_loss": 2.968921661376953, "eval_runtime": 20.4043, "eval_samples_per_second": 37.639, "eval_steps_per_second": 9.41, "eval_tts_loss": 5.955756151524818, "step": 102000 }, { "epoch": 0.9011992440700118, "grad_norm": 1.7485255002975464, "learning_rate": 9.232375304016738e-05, "loss": 3.2068, 
"step": 102050 }, { "epoch": 0.901640791960296, "grad_norm": 2.655881643295288, "learning_rate": 9.231635268551153e-05, "loss": 3.2794, "step": 102100 }, { "epoch": 0.9020823398505802, "grad_norm": 1.1869397163391113, "learning_rate": 9.230894906227628e-05, "loss": 3.6545, "step": 102150 }, { "epoch": 0.9025238877408643, "grad_norm": 1.75388765335083, "learning_rate": 9.23016903408811e-05, "loss": 3.4393, "step": 102200 }, { "epoch": 0.9029654356311485, "grad_norm": 2.8003134727478027, "learning_rate": 9.229428024754599e-05, "loss": 3.6308, "step": 102250 }, { "epoch": 0.9034069835214328, "grad_norm": 2.303340196609497, "learning_rate": 9.228686688733635e-05, "loss": 3.4285, "step": 102300 }, { "epoch": 0.903848531411717, "grad_norm": 1.432195782661438, "learning_rate": 9.227945026082483e-05, "loss": 3.0429, "step": 102350 }, { "epoch": 0.9042900793020011, "grad_norm": 3.9534683227539062, "learning_rate": 9.227203036858429e-05, "loss": 3.385, "step": 102400 }, { "epoch": 0.9047316271922853, "grad_norm": 1.3392046689987183, "learning_rate": 9.226460721118788e-05, "loss": 2.9859, "step": 102450 }, { "epoch": 0.9051731750825694, "grad_norm": 1.7585742473602295, "learning_rate": 9.225718078920896e-05, "loss": 2.8935, "step": 102500 }, { "epoch": 0.9056147229728536, "grad_norm": 1.3776382207870483, "learning_rate": 9.224975110322113e-05, "loss": 3.2253, "step": 102550 }, { "epoch": 0.9060562708631378, "grad_norm": 0.8360838294029236, "learning_rate": 9.22423181537983e-05, "loss": 2.9125, "step": 102600 }, { "epoch": 0.906497818753422, "grad_norm": 2.6101293563842773, "learning_rate": 9.223488194151461e-05, "loss": 3.0466, "step": 102650 }, { "epoch": 0.9069393666437062, "grad_norm": 2.8164517879486084, "learning_rate": 9.222744246694441e-05, "loss": 3.3639, "step": 102700 }, { "epoch": 0.9073809145339904, "grad_norm": 2.205768585205078, "learning_rate": 9.221999973066238e-05, "loss": 3.3834, "step": 102750 }, { "epoch": 0.9078224624242746, "grad_norm": 
3.8350329399108887, "learning_rate": 9.221255373324338e-05, "loss": 2.7993, "step": 102800 }, { "epoch": 0.9082640103145587, "grad_norm": 3.8770978450775146, "learning_rate": 9.220510447526256e-05, "loss": 2.9693, "step": 102850 }, { "epoch": 0.9087055582048429, "grad_norm": 2.760380983352661, "learning_rate": 9.219765195729529e-05, "loss": 3.5722, "step": 102900 }, { "epoch": 0.9091471060951271, "grad_norm": 3.3382771015167236, "learning_rate": 9.219019617991724e-05, "loss": 3.5588, "step": 102950 }, { "epoch": 0.9095886539854112, "grad_norm": 2.2112584114074707, "learning_rate": 9.218273714370432e-05, "loss": 3.3712, "step": 103000 }, { "epoch": 0.9100302018756954, "grad_norm": 2.458007335662842, "learning_rate": 9.217527484923262e-05, "loss": 2.5991, "step": 103050 }, { "epoch": 0.9104717497659797, "grad_norm": 2.0607690811157227, "learning_rate": 9.21678092970786e-05, "loss": 3.4419, "step": 103100 }, { "epoch": 0.9109132976562638, "grad_norm": 3.3169896602630615, "learning_rate": 9.216034048781887e-05, "loss": 2.8659, "step": 103150 }, { "epoch": 0.911354845546548, "grad_norm": 2.7883317470550537, "learning_rate": 9.215286842203035e-05, "loss": 3.6627, "step": 103200 }, { "epoch": 0.9117963934368322, "grad_norm": 1.9193239212036133, "learning_rate": 9.21453931002902e-05, "loss": 3.2209, "step": 103250 }, { "epoch": 0.9122379413271163, "grad_norm": 1.5039438009262085, "learning_rate": 9.213791452317579e-05, "loss": 3.2848, "step": 103300 }, { "epoch": 0.9126794892174005, "grad_norm": 1.5526306629180908, "learning_rate": 9.213043269126483e-05, "loss": 3.0704, "step": 103350 }, { "epoch": 0.9131210371076847, "grad_norm": 1.0206029415130615, "learning_rate": 9.212294760513518e-05, "loss": 3.675, "step": 103400 }, { "epoch": 0.9135625849979688, "grad_norm": 4.434154510498047, "learning_rate": 9.211545926536504e-05, "loss": 3.4542, "step": 103450 }, { "epoch": 0.9140041328882531, "grad_norm": 2.1419999599456787, "learning_rate": 9.210796767253278e-05, "loss": 
3.8326, "step": 103500 }, { "epoch": 0.9144456807785373, "grad_norm": 0.9824464321136475, "learning_rate": 9.210047282721709e-05, "loss": 3.2751, "step": 103550 }, { "epoch": 0.9148872286688214, "grad_norm": 3.2531392574310303, "learning_rate": 9.20929747299969e-05, "loss": 3.1043, "step": 103600 }, { "epoch": 0.9153287765591056, "grad_norm": 7.856770992279053, "learning_rate": 9.208547338145132e-05, "loss": 3.0719, "step": 103650 }, { "epoch": 0.9157703244493898, "grad_norm": 0.7467933893203735, "learning_rate": 9.207796878215979e-05, "loss": 3.6067, "step": 103700 }, { "epoch": 0.9162118723396739, "grad_norm": 11.154623985290527, "learning_rate": 9.2070460932702e-05, "loss": 3.2332, "step": 103750 }, { "epoch": 0.9166534202299581, "grad_norm": 4.158257484436035, "learning_rate": 9.206294983365785e-05, "loss": 3.4292, "step": 103800 }, { "epoch": 0.9170949681202423, "grad_norm": 1.339894413948059, "learning_rate": 9.205543548560751e-05, "loss": 3.1509, "step": 103850 }, { "epoch": 0.9175365160105265, "grad_norm": 1.5442434549331665, "learning_rate": 9.204791788913138e-05, "loss": 3.0922, "step": 103900 }, { "epoch": 0.9179780639008107, "grad_norm": 1.0537227392196655, "learning_rate": 9.204039704481018e-05, "loss": 3.2525, "step": 103950 }, { "epoch": 0.9184196117910949, "grad_norm": 2.2441678047180176, "learning_rate": 9.203287295322478e-05, "loss": 2.9392, "step": 104000 }, { "epoch": 0.918861159681379, "grad_norm": 3.044712543487549, "learning_rate": 9.202534561495637e-05, "loss": 3.0516, "step": 104050 }, { "epoch": 0.9193027075716632, "grad_norm": 1.7708096504211426, "learning_rate": 9.201781503058638e-05, "loss": 3.3234, "step": 104100 }, { "epoch": 0.9197442554619474, "grad_norm": 2.0960426330566406, "learning_rate": 9.201028120069648e-05, "loss": 3.808, "step": 104150 }, { "epoch": 0.9201858033522315, "grad_norm": 6.793859958648682, "learning_rate": 9.20027441258686e-05, "loss": 3.6451, "step": 104200 }, { "epoch": 0.9206273512425157, "grad_norm": 
2.1247751712799072, "learning_rate": 9.19952038066849e-05, "loss": 3.6018, "step": 104250 }, { "epoch": 0.9210688991328, "grad_norm": 1.15399169921875, "learning_rate": 9.198766024372783e-05, "loss": 3.3271, "step": 104300 }, { "epoch": 0.9215104470230842, "grad_norm": 1.7735700607299805, "learning_rate": 9.198011343758002e-05, "loss": 3.0006, "step": 104350 }, { "epoch": 0.9219519949133683, "grad_norm": 0.8139469027519226, "learning_rate": 9.197256338882445e-05, "loss": 3.0361, "step": 104400 }, { "epoch": 0.9223935428036525, "grad_norm": 3.3219869136810303, "learning_rate": 9.196501009804429e-05, "loss": 3.1615, "step": 104450 }, { "epoch": 0.9228350906939367, "grad_norm": 2.1719770431518555, "learning_rate": 9.195745356582292e-05, "loss": 3.8578, "step": 104500 }, { "epoch": 0.9232766385842208, "grad_norm": 1.2096954584121704, "learning_rate": 9.194989379274406e-05, "loss": 3.1797, "step": 104550 }, { "epoch": 0.923718186474505, "grad_norm": 1.369635820388794, "learning_rate": 9.194233077939165e-05, "loss": 3.1194, "step": 104600 }, { "epoch": 0.9241597343647892, "grad_norm": 1.7169206142425537, "learning_rate": 9.193476452634983e-05, "loss": 3.3546, "step": 104650 }, { "epoch": 0.9246012822550734, "grad_norm": 5.232157230377197, "learning_rate": 9.192719503420304e-05, "loss": 3.2173, "step": 104700 }, { "epoch": 0.9250428301453576, "grad_norm": 1.7149889469146729, "learning_rate": 9.191962230353598e-05, "loss": 3.0774, "step": 104750 }, { "epoch": 0.9254843780356418, "grad_norm": 1.5316158533096313, "learning_rate": 9.191204633493355e-05, "loss": 3.6406, "step": 104800 }, { "epoch": 0.9259259259259259, "grad_norm": 2.05110239982605, "learning_rate": 9.190446712898097e-05, "loss": 3.3836, "step": 104850 }, { "epoch": 0.9263674738162101, "grad_norm": 3.319016695022583, "learning_rate": 9.189688468626363e-05, "loss": 3.1945, "step": 104900 }, { "epoch": 0.9268090217064943, "grad_norm": 2.672206163406372, "learning_rate": 9.188929900736722e-05, "loss": 3.5687, 
"step": 104950 }, { "epoch": 0.9272505695967784, "grad_norm": 1.4646509885787964, "learning_rate": 9.188171009287767e-05, "loss": 3.329, "step": 105000 }, { "epoch": 0.9272505695967784, "eval_asr_loss": 0.9064391918799752, "eval_loss": 2.951792001724243, "eval_runtime": 20.8436, "eval_samples_per_second": 36.846, "eval_steps_per_second": 9.211, "eval_tts_loss": 5.888024541957233, "step": 105000 }, { "epoch": 0.9276921174870626, "grad_norm": 8.195231437683105, "learning_rate": 9.187411794338117e-05, "loss": 3.1485, "step": 105050 }, { "epoch": 0.9281336653773469, "grad_norm": 2.3274195194244385, "learning_rate": 9.186652255946413e-05, "loss": 3.4639, "step": 105100 }, { "epoch": 0.928575213267631, "grad_norm": 2.3561298847198486, "learning_rate": 9.185892394171325e-05, "loss": 2.9877, "step": 105150 }, { "epoch": 0.9290167611579152, "grad_norm": 2.7232580184936523, "learning_rate": 9.185132209071545e-05, "loss": 3.1942, "step": 105200 }, { "epoch": 0.9294583090481994, "grad_norm": 2.004868268966675, "learning_rate": 9.18437170070579e-05, "loss": 3.271, "step": 105250 }, { "epoch": 0.9298998569384835, "grad_norm": 1.0723556280136108, "learning_rate": 9.183610869132804e-05, "loss": 3.7162, "step": 105300 }, { "epoch": 0.9303414048287677, "grad_norm": 2.3168883323669434, "learning_rate": 9.182849714411354e-05, "loss": 3.3603, "step": 105350 }, { "epoch": 0.9307829527190519, "grad_norm": 3.932926654815674, "learning_rate": 9.182088236600235e-05, "loss": 3.2771, "step": 105400 }, { "epoch": 0.931224500609336, "grad_norm": 2.3578920364379883, "learning_rate": 9.18132643575826e-05, "loss": 3.2315, "step": 105450 }, { "epoch": 0.9316660484996203, "grad_norm": 2.431108236312866, "learning_rate": 9.180564311944276e-05, "loss": 3.5825, "step": 105500 }, { "epoch": 0.9321075963899045, "grad_norm": 1.2201814651489258, "learning_rate": 9.179801865217149e-05, "loss": 3.2987, "step": 105550 }, { "epoch": 0.9325491442801886, "grad_norm": 3.136932134628296, "learning_rate": 
9.17903909563577e-05, "loss": 3.5143, "step": 105600 }, { "epoch": 0.9329906921704728, "grad_norm": 1.9136083126068115, "learning_rate": 9.178276003259061e-05, "loss": 2.8707, "step": 105650 }, { "epoch": 0.933432240060757, "grad_norm": 3.5375232696533203, "learning_rate": 9.177512588145959e-05, "loss": 3.5015, "step": 105700 }, { "epoch": 0.9338737879510411, "grad_norm": 3.3134589195251465, "learning_rate": 9.176748850355434e-05, "loss": 3.313, "step": 105750 }, { "epoch": 0.9343153358413253, "grad_norm": 4.367105484008789, "learning_rate": 9.175984789946479e-05, "loss": 3.431, "step": 105800 }, { "epoch": 0.9347568837316095, "grad_norm": 0.8827017545700073, "learning_rate": 9.17522040697811e-05, "loss": 3.2357, "step": 105850 }, { "epoch": 0.9351984316218938, "grad_norm": 2.8383736610412598, "learning_rate": 9.17445570150937e-05, "loss": 3.4118, "step": 105900 }, { "epoch": 0.9356399795121779, "grad_norm": 2.147942543029785, "learning_rate": 9.173690673599325e-05, "loss": 3.4423, "step": 105950 }, { "epoch": 0.9360815274024621, "grad_norm": 0.8061859607696533, "learning_rate": 9.172925323307066e-05, "loss": 2.889, "step": 106000 }, { "epoch": 0.9365230752927463, "grad_norm": 2.103886127471924, "learning_rate": 9.172159650691714e-05, "loss": 3.1968, "step": 106050 }, { "epoch": 0.9369646231830304, "grad_norm": 1.432693362236023, "learning_rate": 9.171393655812406e-05, "loss": 3.4562, "step": 106100 }, { "epoch": 0.9374061710733146, "grad_norm": 1.1502761840820312, "learning_rate": 9.17062733872831e-05, "loss": 3.4893, "step": 106150 }, { "epoch": 0.9378477189635988, "grad_norm": 2.3663687705993652, "learning_rate": 9.169860699498619e-05, "loss": 3.1563, "step": 106200 }, { "epoch": 0.9382892668538829, "grad_norm": 1.9978609085083008, "learning_rate": 9.169093738182548e-05, "loss": 3.0778, "step": 106250 }, { "epoch": 0.9387308147441672, "grad_norm": 1.2889201641082764, "learning_rate": 9.168326454839339e-05, "loss": 3.3576, "step": 106300 }, { "epoch": 
0.9391723626344514, "grad_norm": 1.1832932233810425, "learning_rate": 9.167558849528257e-05, "loss": 2.9429, "step": 106350 }, { "epoch": 0.9396139105247355, "grad_norm": 1.980592966079712, "learning_rate": 9.166790922308594e-05, "loss": 3.0273, "step": 106400 }, { "epoch": 0.9400554584150197, "grad_norm": 5.848330020904541, "learning_rate": 9.166038041374784e-05, "loss": 3.5707, "step": 106450 }, { "epoch": 0.9404970063053039, "grad_norm": 1.3137308359146118, "learning_rate": 9.165269476951148e-05, "loss": 3.1561, "step": 106500 }, { "epoch": 0.940938554195588, "grad_norm": 2.650174617767334, "learning_rate": 9.164500590795764e-05, "loss": 3.3189, "step": 106550 }, { "epoch": 0.9413801020858722, "grad_norm": 7.973121643066406, "learning_rate": 9.163731382968025e-05, "loss": 2.9166, "step": 106600 }, { "epoch": 0.9418216499761564, "grad_norm": 3.19098162651062, "learning_rate": 9.162961853527343e-05, "loss": 3.4358, "step": 106650 }, { "epoch": 0.9422631978664406, "grad_norm": 2.6148929595947266, "learning_rate": 9.16219200253316e-05, "loss": 3.0046, "step": 106700 }, { "epoch": 0.9427047457567248, "grad_norm": 2.952760696411133, "learning_rate": 9.161421830044938e-05, "loss": 3.4446, "step": 106750 }, { "epoch": 0.943146293647009, "grad_norm": 1.8998844623565674, "learning_rate": 9.160651336122169e-05, "loss": 3.002, "step": 106800 }, { "epoch": 0.9435878415372931, "grad_norm": 3.6690402030944824, "learning_rate": 9.159880520824364e-05, "loss": 3.2972, "step": 106850 }, { "epoch": 0.9440293894275773, "grad_norm": 1.2669631242752075, "learning_rate": 9.159109384211065e-05, "loss": 3.7589, "step": 106900 }, { "epoch": 0.9444709373178615, "grad_norm": 2.6425857543945312, "learning_rate": 9.158337926341833e-05, "loss": 3.1515, "step": 106950 }, { "epoch": 0.9449124852081456, "grad_norm": 4.218154430389404, "learning_rate": 9.157566147276258e-05, "loss": 2.7466, "step": 107000 }, { "epoch": 0.9453540330984298, "grad_norm": 1.4164811372756958, "learning_rate": 
9.156794047073953e-05, "loss": 3.019, "step": 107050 }, { "epoch": 0.9457955809887141, "grad_norm": 2.938023090362549, "learning_rate": 9.156021625794556e-05, "loss": 3.3921, "step": 107100 }, { "epoch": 0.9462371288789982, "grad_norm": 2.360468864440918, "learning_rate": 9.155248883497732e-05, "loss": 2.9222, "step": 107150 }, { "epoch": 0.9466786767692824, "grad_norm": 2.1282787322998047, "learning_rate": 9.154475820243167e-05, "loss": 3.0558, "step": 107200 }, { "epoch": 0.9471202246595666, "grad_norm": 3.2383251190185547, "learning_rate": 9.153702436090572e-05, "loss": 3.1759, "step": 107250 }, { "epoch": 0.9475617725498507, "grad_norm": 1.6410382986068726, "learning_rate": 9.152928731099688e-05, "loss": 3.0554, "step": 107300 }, { "epoch": 0.9480033204401349, "grad_norm": 1.9488804340362549, "learning_rate": 9.152154705330274e-05, "loss": 2.8708, "step": 107350 }, { "epoch": 0.9484448683304191, "grad_norm": 0.915654182434082, "learning_rate": 9.15138035884212e-05, "loss": 2.926, "step": 107400 }, { "epoch": 0.9488864162207032, "grad_norm": 5.75622034072876, "learning_rate": 9.150605691695036e-05, "loss": 3.4072, "step": 107450 }, { "epoch": 0.9493279641109875, "grad_norm": 1.4961851835250854, "learning_rate": 9.149830703948856e-05, "loss": 3.4192, "step": 107500 }, { "epoch": 0.9497695120012717, "grad_norm": 2.9729695320129395, "learning_rate": 9.149055395663446e-05, "loss": 3.0369, "step": 107550 }, { "epoch": 0.9502110598915559, "grad_norm": 3.2683751583099365, "learning_rate": 9.148279766898688e-05, "loss": 3.4268, "step": 107600 }, { "epoch": 0.95065260778184, "grad_norm": 3.0689873695373535, "learning_rate": 9.147503817714496e-05, "loss": 3.1971, "step": 107650 }, { "epoch": 0.9510941556721242, "grad_norm": 7.556708335876465, "learning_rate": 9.146727548170803e-05, "loss": 3.5455, "step": 107700 }, { "epoch": 0.9515357035624084, "grad_norm": 2.019350528717041, "learning_rate": 9.145950958327569e-05, "loss": 3.5969, "step": 107750 }, { "epoch": 
0.9519772514526925, "grad_norm": 1.1041678190231323, "learning_rate": 9.145174048244782e-05, "loss": 3.1785, "step": 107800 }, { "epoch": 0.9524187993429767, "grad_norm": 1.0742287635803223, "learning_rate": 9.144396817982449e-05, "loss": 3.0938, "step": 107850 }, { "epoch": 0.952860347233261, "grad_norm": 3.854574203491211, "learning_rate": 9.143619267600605e-05, "loss": 2.8083, "step": 107900 }, { "epoch": 0.9533018951235451, "grad_norm": 2.129457473754883, "learning_rate": 9.14284139715931e-05, "loss": 3.2925, "step": 107950 }, { "epoch": 0.9537434430138293, "grad_norm": 4.399311542510986, "learning_rate": 9.142063206718647e-05, "loss": 3.5332, "step": 108000 }, { "epoch": 0.9537434430138293, "eval_asr_loss": 0.9242537100154695, "eval_loss": 2.951432466506958, "eval_runtime": 20.2039, "eval_samples_per_second": 38.012, "eval_steps_per_second": 9.503, "eval_tts_loss": 5.889301824693111, "step": 108000 }, { "epoch": 0.9541849909041135, "grad_norm": 2.395542860031128, "learning_rate": 9.141284696338725e-05, "loss": 3.7637, "step": 108050 }, { "epoch": 0.9546265387943976, "grad_norm": 5.644167900085449, "learning_rate": 9.140505866079678e-05, "loss": 3.4879, "step": 108100 }, { "epoch": 0.9550680866846818, "grad_norm": 2.9684510231018066, "learning_rate": 9.139726716001662e-05, "loss": 3.2029, "step": 108150 }, { "epoch": 0.955509634574966, "grad_norm": 2.128922939300537, "learning_rate": 9.138947246164862e-05, "loss": 3.6179, "step": 108200 }, { "epoch": 0.9559511824652501, "grad_norm": 1.3739852905273438, "learning_rate": 9.138167456629486e-05, "loss": 3.5268, "step": 108250 }, { "epoch": 0.9563927303555344, "grad_norm": 1.0485423803329468, "learning_rate": 9.137387347455761e-05, "loss": 3.3022, "step": 108300 }, { "epoch": 0.9568342782458186, "grad_norm": 2.3977785110473633, "learning_rate": 9.13660691870395e-05, "loss": 3.52, "step": 108350 }, { "epoch": 0.9572758261361027, "grad_norm": 3.1857287883758545, "learning_rate": 9.135826170434331e-05, "loss": 3.6409, 
"step": 108400 }, { "epoch": 0.9577173740263869, "grad_norm": 1.380523920059204, "learning_rate": 9.135045102707212e-05, "loss": 3.4588, "step": 108450 }, { "epoch": 0.9581589219166711, "grad_norm": 2.0876681804656982, "learning_rate": 9.134263715582922e-05, "loss": 3.5122, "step": 108500 }, { "epoch": 0.9586004698069552, "grad_norm": 1.6372121572494507, "learning_rate": 9.133482009121816e-05, "loss": 2.4488, "step": 108550 }, { "epoch": 0.9590420176972394, "grad_norm": 1.7906489372253418, "learning_rate": 9.132699983384278e-05, "loss": 3.528, "step": 108600 }, { "epoch": 0.9594835655875236, "grad_norm": 3.648322582244873, "learning_rate": 9.13191763843071e-05, "loss": 3.2061, "step": 108650 }, { "epoch": 0.9599251134778078, "grad_norm": 2.764986991882324, "learning_rate": 9.131134974321542e-05, "loss": 3.2878, "step": 108700 }, { "epoch": 0.960366661368092, "grad_norm": 2.228464365005493, "learning_rate": 9.130351991117229e-05, "loss": 3.1307, "step": 108750 }, { "epoch": 0.9608082092583762, "grad_norm": 1.0917402505874634, "learning_rate": 9.129568688878248e-05, "loss": 2.9365, "step": 108800 }, { "epoch": 0.9612497571486603, "grad_norm": 0.8567925095558167, "learning_rate": 9.128785067665104e-05, "loss": 3.3896, "step": 108850 }, { "epoch": 0.9616913050389445, "grad_norm": 0.9180238246917725, "learning_rate": 9.128001127538325e-05, "loss": 3.7838, "step": 108900 }, { "epoch": 0.9621328529292287, "grad_norm": 6.334617614746094, "learning_rate": 9.127216868558463e-05, "loss": 3.0286, "step": 108950 }, { "epoch": 0.9625744008195128, "grad_norm": 1.8890008926391602, "learning_rate": 9.126432290786096e-05, "loss": 3.2409, "step": 109000 }, { "epoch": 0.963015948709797, "grad_norm": 1.5762968063354492, "learning_rate": 9.125647394281824e-05, "loss": 3.1578, "step": 109050 }, { "epoch": 0.9634574966000813, "grad_norm": 1.3965164422988892, "learning_rate": 9.124862179106278e-05, "loss": 3.3663, "step": 109100 }, { "epoch": 0.9638990444903655, "grad_norm": 
2.264342784881592, "learning_rate": 9.124076645320104e-05, "loss": 3.2378, "step": 109150 }, { "epoch": 0.9643405923806496, "grad_norm": 2.7489473819732666, "learning_rate": 9.12329079298398e-05, "loss": 3.5606, "step": 109200 }, { "epoch": 0.9647821402709338, "grad_norm": 1.9388216733932495, "learning_rate": 9.122504622158608e-05, "loss": 3.2344, "step": 109250 }, { "epoch": 0.965223688161218, "grad_norm": 1.1815500259399414, "learning_rate": 9.121733865809997e-05, "loss": 2.8723, "step": 109300 }, { "epoch": 0.9656652360515021, "grad_norm": 0.5941036343574524, "learning_rate": 9.120947064555085e-05, "loss": 3.0666, "step": 109350 }, { "epoch": 0.9661067839417863, "grad_norm": 2.4697368144989014, "learning_rate": 9.120159944991959e-05, "loss": 3.2941, "step": 109400 }, { "epoch": 0.9665483318320705, "grad_norm": 2.9544789791107178, "learning_rate": 9.119372507181415e-05, "loss": 3.0451, "step": 109450 }, { "epoch": 0.9669898797223547, "grad_norm": 4.041789531707764, "learning_rate": 9.118584751184273e-05, "loss": 3.1865, "step": 109500 }, { "epoch": 0.9674314276126389, "grad_norm": 0.4819214344024658, "learning_rate": 9.117796677061387e-05, "loss": 3.2453, "step": 109550 }, { "epoch": 0.9678729755029231, "grad_norm": 3.580658197402954, "learning_rate": 9.117008284873625e-05, "loss": 2.897, "step": 109600 }, { "epoch": 0.9683145233932072, "grad_norm": 1.2903705835342407, "learning_rate": 9.116219574681882e-05, "loss": 3.5748, "step": 109650 }, { "epoch": 0.9687560712834914, "grad_norm": 2.910269260406494, "learning_rate": 9.115430546547083e-05, "loss": 3.1729, "step": 109700 }, { "epoch": 0.9691976191737756, "grad_norm": 2.1742489337921143, "learning_rate": 9.114641200530171e-05, "loss": 3.8506, "step": 109750 }, { "epoch": 0.9696391670640597, "grad_norm": 1.4163600206375122, "learning_rate": 9.11385153669212e-05, "loss": 3.486, "step": 109800 }, { "epoch": 0.970080714954344, "grad_norm": 2.7012150287628174, "learning_rate": 9.11306155509392e-05, "loss": 3.0437, 
"step": 109850 }, { "epoch": 0.9705222628446282, "grad_norm": 3.0786678791046143, "learning_rate": 9.112271255796593e-05, "loss": 2.9614, "step": 109900 }, { "epoch": 0.9709638107349123, "grad_norm": 2.5239641666412354, "learning_rate": 9.111480638861183e-05, "loss": 2.8935, "step": 109950 }, { "epoch": 0.9714053586251965, "grad_norm": 1.9971002340316772, "learning_rate": 9.110689704348758e-05, "loss": 3.0526, "step": 110000 }, { "epoch": 0.9718469065154807, "grad_norm": 2.921504259109497, "learning_rate": 9.10989845232041e-05, "loss": 2.9988, "step": 110050 }, { "epoch": 0.9722884544057648, "grad_norm": 3.520411491394043, "learning_rate": 9.109106882837259e-05, "loss": 2.7144, "step": 110100 }, { "epoch": 0.972730002296049, "grad_norm": 11.24007511138916, "learning_rate": 9.108314995960445e-05, "loss": 3.398, "step": 110150 }, { "epoch": 0.9731715501863332, "grad_norm": 1.7013554573059082, "learning_rate": 9.107522791751135e-05, "loss": 3.3744, "step": 110200 }, { "epoch": 0.9736130980766174, "grad_norm": 5.85800313949585, "learning_rate": 9.106746123808995e-05, "loss": 2.8389, "step": 110250 }, { "epoch": 0.9740546459669016, "grad_norm": 0.44112929701805115, "learning_rate": 9.105953291461892e-05, "loss": 3.5296, "step": 110300 }, { "epoch": 0.9744961938571858, "grad_norm": 1.3203797340393066, "learning_rate": 9.105160141964715e-05, "loss": 3.5493, "step": 110350 }, { "epoch": 0.9749377417474699, "grad_norm": 2.4732491970062256, "learning_rate": 9.104366675378729e-05, "loss": 3.4198, "step": 110400 }, { "epoch": 0.9753792896377541, "grad_norm": 3.4213356971740723, "learning_rate": 9.10357289176522e-05, "loss": 3.2578, "step": 110450 }, { "epoch": 0.9758208375280383, "grad_norm": 2.482844352722168, "learning_rate": 9.102778791185502e-05, "loss": 3.2519, "step": 110500 }, { "epoch": 0.9762623854183224, "grad_norm": 2.138728380203247, "learning_rate": 9.101984373700912e-05, "loss": 3.0722, "step": 110550 }, { "epoch": 0.9767039333086066, "grad_norm": 
1.6257545948028564, "learning_rate": 9.101189639372815e-05, "loss": 2.7877, "step": 110600 }, { "epoch": 0.9771454811988909, "grad_norm": 1.8852672576904297, "learning_rate": 9.100394588262595e-05, "loss": 3.0108, "step": 110650 }, { "epoch": 0.977587029089175, "grad_norm": 4.819422245025635, "learning_rate": 9.099599220431662e-05, "loss": 3.0413, "step": 110700 }, { "epoch": 0.9780285769794592, "grad_norm": 1.705790400505066, "learning_rate": 9.098803535941456e-05, "loss": 3.0909, "step": 110750 }, { "epoch": 0.9784701248697434, "grad_norm": 2.922133445739746, "learning_rate": 9.098007534853431e-05, "loss": 3.318, "step": 110800 }, { "epoch": 0.9789116727600276, "grad_norm": 0.9488845467567444, "learning_rate": 9.097211217229074e-05, "loss": 3.0227, "step": 110850 }, { "epoch": 0.9793532206503117, "grad_norm": 1.900322675704956, "learning_rate": 9.096414583129895e-05, "loss": 2.8965, "step": 110900 }, { "epoch": 0.9797947685405959, "grad_norm": 4.18387508392334, "learning_rate": 9.095617632617426e-05, "loss": 2.5843, "step": 110950 }, { "epoch": 0.98023631643088, "grad_norm": 4.1672444343566895, "learning_rate": 9.094820365753224e-05, "loss": 3.7352, "step": 111000 }, { "epoch": 0.98023631643088, "eval_asr_loss": 0.9174097384220402, "eval_loss": 2.9519290924072266, "eval_runtime": 20.6953, "eval_samples_per_second": 37.11, "eval_steps_per_second": 9.277, "eval_tts_loss": 5.933492346852046, "step": 111000 }, { "epoch": 0.9806778643211643, "grad_norm": 1.7948575019836426, "learning_rate": 9.094022782598873e-05, "loss": 3.3024, "step": 111050 }, { "epoch": 0.9811194122114485, "grad_norm": 1.7361973524093628, "learning_rate": 9.093224883215975e-05, "loss": 3.2393, "step": 111100 }, { "epoch": 0.9815609601017327, "grad_norm": 3.170607805252075, "learning_rate": 9.092426667666167e-05, "loss": 2.9186, "step": 111150 }, { "epoch": 0.9820025079920168, "grad_norm": 2.6592564582824707, "learning_rate": 9.091628136011103e-05, "loss": 3.1093, "step": 111200 }, { "epoch": 
0.982444055882301, "grad_norm": 3.301297187805176, "learning_rate": 9.090829288312459e-05, "loss": 3.4304, "step": 111250 }, { "epoch": 0.9828856037725852, "grad_norm": 1.6504911184310913, "learning_rate": 9.090030124631943e-05, "loss": 2.9901, "step": 111300 }, { "epoch": 0.9833271516628693, "grad_norm": 1.7255781888961792, "learning_rate": 9.089230645031281e-05, "loss": 3.1932, "step": 111350 }, { "epoch": 0.9837686995531535, "grad_norm": 1.882737159729004, "learning_rate": 9.088446848576421e-05, "loss": 3.1261, "step": 111400 }, { "epoch": 0.9842102474434378, "grad_norm": 2.1880311965942383, "learning_rate": 9.087646743636082e-05, "loss": 2.9171, "step": 111450 }, { "epoch": 0.9846517953337219, "grad_norm": 2.5656466484069824, "learning_rate": 9.086846322959693e-05, "loss": 3.2527, "step": 111500 }, { "epoch": 0.9850933432240061, "grad_norm": 1.9035078287124634, "learning_rate": 9.086045586609081e-05, "loss": 2.9598, "step": 111550 }, { "epoch": 0.9855348911142903, "grad_norm": 2.160431385040283, "learning_rate": 9.085244534646095e-05, "loss": 2.8841, "step": 111600 }, { "epoch": 0.9859764390045744, "grad_norm": 1.5755068063735962, "learning_rate": 9.084443167132613e-05, "loss": 3.3504, "step": 111650 }, { "epoch": 0.9864179868948586, "grad_norm": 1.7207096815109253, "learning_rate": 9.083641484130529e-05, "loss": 3.3112, "step": 111700 }, { "epoch": 0.9868595347851428, "grad_norm": 2.6062755584716797, "learning_rate": 9.082839485701771e-05, "loss": 3.3106, "step": 111750 }, { "epoch": 0.9873010826754269, "grad_norm": 2.2912654876708984, "learning_rate": 9.082037171908282e-05, "loss": 3.0499, "step": 111800 }, { "epoch": 0.9877426305657112, "grad_norm": 1.4460901021957397, "learning_rate": 9.081234542812038e-05, "loss": 3.7295, "step": 111850 }, { "epoch": 0.9881841784559954, "grad_norm": 1.997914433479309, "learning_rate": 9.080431598475032e-05, "loss": 3.2918, "step": 111900 }, { "epoch": 0.9886257263462795, "grad_norm": 2.2903008460998535, "learning_rate": 
9.079628338959286e-05, "loss": 3.5094, "step": 111950 }, { "epoch": 0.9890672742365637, "grad_norm": 2.559363603591919, "learning_rate": 9.078824764326845e-05, "loss": 3.3677, "step": 112000 }, { "epoch": 0.9895088221268479, "grad_norm": 3.8502397537231445, "learning_rate": 9.07802087463978e-05, "loss": 3.0312, "step": 112050 }, { "epoch": 0.989950370017132, "grad_norm": 3.3244760036468506, "learning_rate": 9.077216669960181e-05, "loss": 3.4518, "step": 112100 }, { "epoch": 0.9903919179074162, "grad_norm": 3.6893279552459717, "learning_rate": 9.076412150350169e-05, "loss": 3.1172, "step": 112150 }, { "epoch": 0.9908334657977004, "grad_norm": 1.7387036085128784, "learning_rate": 9.075607315871885e-05, "loss": 3.1974, "step": 112200 }, { "epoch": 0.9912750136879847, "grad_norm": 3.1072323322296143, "learning_rate": 9.074802166587496e-05, "loss": 2.914, "step": 112250 }, { "epoch": 0.9917165615782688, "grad_norm": 2.5612170696258545, "learning_rate": 9.073996702559193e-05, "loss": 3.5433, "step": 112300 }, { "epoch": 0.992158109468553, "grad_norm": 2.5611658096313477, "learning_rate": 9.073190923849193e-05, "loss": 3.3504, "step": 112350 }, { "epoch": 0.9925996573588372, "grad_norm": 2.5978565216064453, "learning_rate": 9.07238483051973e-05, "loss": 3.011, "step": 112400 }, { "epoch": 0.9930412052491213, "grad_norm": 4.176834583282471, "learning_rate": 9.071578422633075e-05, "loss": 3.2228, "step": 112450 }, { "epoch": 0.9934827531394055, "grad_norm": 1.0866472721099854, "learning_rate": 9.070771700251512e-05, "loss": 3.4185, "step": 112500 }, { "epoch": 0.9939243010296896, "grad_norm": 2.3132870197296143, "learning_rate": 9.069964663437354e-05, "loss": 3.3366, "step": 112550 }, { "epoch": 0.9943658489199738, "grad_norm": 1.7939915657043457, "learning_rate": 9.069157312252938e-05, "loss": 3.7442, "step": 112600 }, { "epoch": 0.9948073968102581, "grad_norm": 1.947704553604126, "learning_rate": 9.068349646760626e-05, "loss": 3.3506, "step": 112650 }, { "epoch": 
0.9952489447005423, "grad_norm": 1.6947288513183594, "learning_rate": 9.0675416670228e-05, "loss": 3.4434, "step": 112700 }, { "epoch": 0.9956904925908264, "grad_norm": 5.008423805236816, "learning_rate": 9.066733373101875e-05, "loss": 3.3881, "step": 112750 }, { "epoch": 0.9961320404811106, "grad_norm": 0.6654936671257019, "learning_rate": 9.06592476506028e-05, "loss": 3.3806, "step": 112800 }, { "epoch": 0.9965735883713948, "grad_norm": 4.415541172027588, "learning_rate": 9.065115842960475e-05, "loss": 2.8368, "step": 112850 }, { "epoch": 0.9970151362616789, "grad_norm": 2.217128276824951, "learning_rate": 9.064306606864943e-05, "loss": 3.4863, "step": 112900 }, { "epoch": 0.9974566841519631, "grad_norm": 2.124258518218994, "learning_rate": 9.063497056836189e-05, "loss": 3.4668, "step": 112950 }, { "epoch": 0.9978982320422473, "grad_norm": 2.5500919818878174, "learning_rate": 9.062687192936745e-05, "loss": 3.2656, "step": 113000 }, { "epoch": 0.9983397799325315, "grad_norm": 1.8848084211349487, "learning_rate": 9.061877015229166e-05, "loss": 3.3762, "step": 113050 }, { "epoch": 0.9987813278228157, "grad_norm": 1.6677478551864624, "learning_rate": 9.061066523776032e-05, "loss": 3.2986, "step": 113100 }, { "epoch": 0.9992228757130999, "grad_norm": 2.6204864978790283, "learning_rate": 9.060255718639945e-05, "loss": 3.4573, "step": 113150 }, { "epoch": 0.999664423603384, "grad_norm": 2.433886766433716, "learning_rate": 9.059444599883534e-05, "loss": 3.19, "step": 113200 }, { "epoch": 1.0001059714936682, "grad_norm": 3.542097568511963, "learning_rate": 9.058633167569451e-05, "loss": 3.1019, "step": 113250 }, { "epoch": 1.0005475193839524, "grad_norm": 3.6584270000457764, "learning_rate": 9.057821421760372e-05, "loss": 3.1219, "step": 113300 }, { "epoch": 1.0009890672742365, "grad_norm": 2.3734920024871826, "learning_rate": 9.057009362518998e-05, "loss": 3.0769, "step": 113350 }, { "epoch": 1.0014306151645207, "grad_norm": 1.0202910900115967, "learning_rate": 
9.056196989908053e-05, "loss": 3.166, "step": 113400 }, { "epoch": 1.0018721630548049, "grad_norm": 5.295326232910156, "learning_rate": 9.055384303990285e-05, "loss": 3.2082, "step": 113450 }, { "epoch": 1.002313710945089, "grad_norm": 3.40751314163208, "learning_rate": 9.054571304828468e-05, "loss": 3.1396, "step": 113500 }, { "epoch": 1.0027552588353732, "grad_norm": 1.1059200763702393, "learning_rate": 9.0537579924854e-05, "loss": 3.0367, "step": 113550 }, { "epoch": 1.0031968067256574, "grad_norm": 1.1731442213058472, "learning_rate": 9.052944367023902e-05, "loss": 3.0164, "step": 113600 }, { "epoch": 1.0036383546159418, "grad_norm": 2.744382381439209, "learning_rate": 9.05213042850682e-05, "loss": 2.7251, "step": 113650 }, { "epoch": 1.004079902506226, "grad_norm": 3.96297550201416, "learning_rate": 9.051316176997023e-05, "loss": 3.0946, "step": 113700 }, { "epoch": 1.00452145039651, "grad_norm": 1.3721880912780762, "learning_rate": 9.050501612557406e-05, "loss": 3.1733, "step": 113750 }, { "epoch": 1.0049629982867943, "grad_norm": 4.159188270568848, "learning_rate": 9.049686735250886e-05, "loss": 3.3159, "step": 113800 }, { "epoch": 1.0054045461770784, "grad_norm": 2.8054113388061523, "learning_rate": 9.048871545140407e-05, "loss": 3.2084, "step": 113850 }, { "epoch": 1.0058460940673626, "grad_norm": 5.3158392906188965, "learning_rate": 9.048056042288933e-05, "loss": 3.0027, "step": 113900 }, { "epoch": 1.0062876419576468, "grad_norm": 0.9245600700378418, "learning_rate": 9.047240226759457e-05, "loss": 3.0888, "step": 113950 }, { "epoch": 1.006729189847931, "grad_norm": 2.9411399364471436, "learning_rate": 9.046424098614993e-05, "loss": 3.3146, "step": 114000 }, { "epoch": 1.006729189847931, "eval_asr_loss": 0.9158940412254022, "eval_loss": 2.9361159801483154, "eval_runtime": 20.4244, "eval_samples_per_second": 37.602, "eval_steps_per_second": 9.401, "eval_tts_loss": 6.045867164957902, "step": 114000 }, { "epoch": 1.007170737738215, "grad_norm": 
1.7878565788269043, "learning_rate": 9.045607657918583e-05, "loss": 3.6228, "step": 114050 }, { "epoch": 1.0076122856284992, "grad_norm": 1.3481472730636597, "learning_rate": 9.044790904733282e-05, "loss": 3.5248, "step": 114100 }, { "epoch": 1.0080538335187834, "grad_norm": 1.8237581253051758, "learning_rate": 9.043973839122186e-05, "loss": 3.0638, "step": 114150 }, { "epoch": 1.0084953814090676, "grad_norm": 3.578498363494873, "learning_rate": 9.0431564611484e-05, "loss": 3.2789, "step": 114200 }, { "epoch": 1.0089369292993517, "grad_norm": 1.5951688289642334, "learning_rate": 9.042338770875067e-05, "loss": 3.259, "step": 114250 }, { "epoch": 1.009378477189636, "grad_norm": 2.594109058380127, "learning_rate": 9.041520768365338e-05, "loss": 2.8875, "step": 114300 }, { "epoch": 1.00982002507992, "grad_norm": 2.1558804512023926, "learning_rate": 9.040702453682402e-05, "loss": 2.6654, "step": 114350 }, { "epoch": 1.0102615729702042, "grad_norm": 2.0555760860443115, "learning_rate": 9.039883826889466e-05, "loss": 3.57, "step": 114400 }, { "epoch": 1.0107031208604886, "grad_norm": 3.0848312377929688, "learning_rate": 9.03906488804976e-05, "loss": 2.8284, "step": 114450 }, { "epoch": 1.0111446687507728, "grad_norm": 0.27899661660194397, "learning_rate": 9.038245637226543e-05, "loss": 3.164, "step": 114500 }, { "epoch": 1.011586216641057, "grad_norm": 1.8623350858688354, "learning_rate": 9.037426074483093e-05, "loss": 2.9045, "step": 114550 }, { "epoch": 1.0120277645313411, "grad_norm": 1.1736133098602295, "learning_rate": 9.036606199882716e-05, "loss": 3.3847, "step": 114600 }, { "epoch": 1.0124693124216253, "grad_norm": 6.291317462921143, "learning_rate": 9.035786013488739e-05, "loss": 3.3651, "step": 114650 }, { "epoch": 1.0129108603119095, "grad_norm": 3.4732656478881836, "learning_rate": 9.034965515364515e-05, "loss": 3.8414, "step": 114700 }, { "epoch": 1.0133524082021936, "grad_norm": 3.2274088859558105, "learning_rate": 9.034144705573422e-05, "loss": 2.9208, 
"step": 114750 }, { "epoch": 1.0137939560924778, "grad_norm": 1.771211862564087, "learning_rate": 9.033323584178857e-05, "loss": 3.3398, "step": 114800 }, { "epoch": 1.014235503982762, "grad_norm": 2.4299356937408447, "learning_rate": 9.032502151244246e-05, "loss": 3.5498, "step": 114850 }, { "epoch": 1.0146770518730461, "grad_norm": 3.958322286605835, "learning_rate": 9.031680406833041e-05, "loss": 3.13, "step": 114900 }, { "epoch": 1.0151185997633303, "grad_norm": 1.6944149732589722, "learning_rate": 9.030858351008711e-05, "loss": 3.3474, "step": 114950 }, { "epoch": 1.0155601476536145, "grad_norm": 1.9284944534301758, "learning_rate": 9.030035983834754e-05, "loss": 3.3015, "step": 115000 }, { "epoch": 1.0160016955438986, "grad_norm": 9.017916679382324, "learning_rate": 9.02921330537469e-05, "loss": 3.0109, "step": 115050 }, { "epoch": 1.0164432434341828, "grad_norm": 2.3894286155700684, "learning_rate": 9.028390315692066e-05, "loss": 3.0692, "step": 115100 }, { "epoch": 1.016884791324467, "grad_norm": 3.3184964656829834, "learning_rate": 9.02756701485045e-05, "loss": 2.9476, "step": 115150 }, { "epoch": 1.0173263392147511, "grad_norm": 1.9609465599060059, "learning_rate": 9.026743402913435e-05, "loss": 2.94, "step": 115200 }, { "epoch": 1.0177678871050355, "grad_norm": 1.2730053663253784, "learning_rate": 9.025919479944638e-05, "loss": 3.3829, "step": 115250 }, { "epoch": 1.0182094349953197, "grad_norm": 2.1093947887420654, "learning_rate": 9.025095246007699e-05, "loss": 3.3117, "step": 115300 }, { "epoch": 1.0186509828856039, "grad_norm": 4.5406670570373535, "learning_rate": 9.024270701166285e-05, "loss": 3.3581, "step": 115350 }, { "epoch": 1.019092530775888, "grad_norm": 2.3314759731292725, "learning_rate": 9.023445845484083e-05, "loss": 3.2504, "step": 115400 }, { "epoch": 1.0195340786661722, "grad_norm": 1.8314716815948486, "learning_rate": 9.022620679024807e-05, "loss": 3.0811, "step": 115450 }, { "epoch": 1.0199756265564563, "grad_norm": 
3.113945960998535, "learning_rate": 9.021795201852197e-05, "loss": 2.9139, "step": 115500 }, { "epoch": 1.0204171744467405, "grad_norm": 7.921067714691162, "learning_rate": 9.020969414030009e-05, "loss": 3.1436, "step": 115550 }, { "epoch": 1.0208587223370247, "grad_norm": 2.9270060062408447, "learning_rate": 9.020143315622032e-05, "loss": 3.2475, "step": 115600 }, { "epoch": 1.0213002702273088, "grad_norm": 2.903693675994873, "learning_rate": 9.019316906692072e-05, "loss": 3.5003, "step": 115650 }, { "epoch": 1.021741818117593, "grad_norm": 2.832529306411743, "learning_rate": 9.018490187303966e-05, "loss": 3.0619, "step": 115700 }, { "epoch": 1.0221833660078772, "grad_norm": 2.0733742713928223, "learning_rate": 9.017663157521567e-05, "loss": 3.1382, "step": 115750 }, { "epoch": 1.0226249138981613, "grad_norm": 2.4163548946380615, "learning_rate": 9.016835817408759e-05, "loss": 3.6552, "step": 115800 }, { "epoch": 1.0230664617884455, "grad_norm": 4.126326084136963, "learning_rate": 9.016008167029445e-05, "loss": 3.3899, "step": 115850 }, { "epoch": 1.0235080096787297, "grad_norm": 2.870994806289673, "learning_rate": 9.015180206447556e-05, "loss": 3.1306, "step": 115900 }, { "epoch": 1.0239495575690138, "grad_norm": 2.4682250022888184, "learning_rate": 9.014351935727041e-05, "loss": 3.3603, "step": 115950 }, { "epoch": 1.024391105459298, "grad_norm": 1.5408371686935425, "learning_rate": 9.013523354931883e-05, "loss": 3.3107, "step": 116000 }, { "epoch": 1.0248326533495824, "grad_norm": 3.0599887371063232, "learning_rate": 9.012694464126077e-05, "loss": 3.2021, "step": 116050 }, { "epoch": 1.0252742012398666, "grad_norm": 2.876763105392456, "learning_rate": 9.011865263373653e-05, "loss": 3.4132, "step": 116100 }, { "epoch": 1.0257157491301507, "grad_norm": 1.7822761535644531, "learning_rate": 9.011035752738655e-05, "loss": 3.1308, "step": 116150 }, { "epoch": 1.026157297020435, "grad_norm": 2.372128963470459, "learning_rate": 9.010205932285159e-05, "loss": 2.9991, 
"step": 116200 }, { "epoch": 1.026598844910719, "grad_norm": 3.578298568725586, "learning_rate": 9.00937580207726e-05, "loss": 3.0682, "step": 116250 }, { "epoch": 1.0270403928010032, "grad_norm": 2.4689536094665527, "learning_rate": 9.008545362179077e-05, "loss": 3.5783, "step": 116300 }, { "epoch": 1.0274819406912874, "grad_norm": 1.7478725910186768, "learning_rate": 9.007714612654759e-05, "loss": 3.2391, "step": 116350 }, { "epoch": 1.0279234885815716, "grad_norm": 1.0738362073898315, "learning_rate": 9.006883553568469e-05, "loss": 2.953, "step": 116400 }, { "epoch": 1.0283650364718557, "grad_norm": 2.858876943588257, "learning_rate": 9.006052184984403e-05, "loss": 3.3315, "step": 116450 }, { "epoch": 1.02880658436214, "grad_norm": 3.038067102432251, "learning_rate": 9.005220506966776e-05, "loss": 3.0056, "step": 116500 }, { "epoch": 1.029248132252424, "grad_norm": 2.7625601291656494, "learning_rate": 9.004388519579829e-05, "loss": 3.4374, "step": 116550 }, { "epoch": 1.0296896801427082, "grad_norm": 1.6374485492706299, "learning_rate": 9.003556222887823e-05, "loss": 3.2502, "step": 116600 }, { "epoch": 1.0301312280329924, "grad_norm": 4.125757694244385, "learning_rate": 9.002723616955049e-05, "loss": 2.8794, "step": 116650 }, { "epoch": 1.0305727759232766, "grad_norm": 1.5545926094055176, "learning_rate": 9.001890701845819e-05, "loss": 3.3588, "step": 116700 }, { "epoch": 1.0310143238135607, "grad_norm": 6.130286693572998, "learning_rate": 9.001057477624467e-05, "loss": 3.479, "step": 116750 }, { "epoch": 1.031455871703845, "grad_norm": 1.1050196886062622, "learning_rate": 9.000223944355351e-05, "loss": 3.1521, "step": 116800 }, { "epoch": 1.0318974195941293, "grad_norm": 4.7166056632995605, "learning_rate": 8.999390102102858e-05, "loss": 3.0597, "step": 116850 }, { "epoch": 1.0323389674844135, "grad_norm": 5.70416259765625, "learning_rate": 8.998555950931392e-05, "loss": 3.5213, "step": 116900 }, { "epoch": 1.0327805153746976, "grad_norm": 2.4848475456237793, 
"learning_rate": 8.997721490905386e-05, "loss": 3.5041, "step": 116950 }, { "epoch": 1.0332220632649818, "grad_norm": 1.464582920074463, "learning_rate": 8.996886722089295e-05, "loss": 3.0542, "step": 117000 }, { "epoch": 1.0332220632649818, "eval_asr_loss": 0.9235363394382403, "eval_loss": 2.937559127807617, "eval_runtime": 22.1307, "eval_samples_per_second": 34.703, "eval_steps_per_second": 8.676, "eval_tts_loss": 5.999984977433617, "step": 117000 }, { "epoch": 1.033663611155266, "grad_norm": 0.5302391648292542, "learning_rate": 8.996051644547598e-05, "loss": 3.2493, "step": 117050 }, { "epoch": 1.0341051590455501, "grad_norm": 1.5811774730682373, "learning_rate": 8.995216258344796e-05, "loss": 3.317, "step": 117100 }, { "epoch": 1.0345467069358343, "grad_norm": 1.782463550567627, "learning_rate": 8.994380563545417e-05, "loss": 2.9838, "step": 117150 }, { "epoch": 1.0349882548261184, "grad_norm": 4.319481372833252, "learning_rate": 8.99354456021401e-05, "loss": 3.1259, "step": 117200 }, { "epoch": 1.0354298027164026, "grad_norm": 2.537027597427368, "learning_rate": 8.99270824841515e-05, "loss": 3.2959, "step": 117250 }, { "epoch": 1.0358713506066868, "grad_norm": 0.6770037412643433, "learning_rate": 8.991871628213436e-05, "loss": 3.3386, "step": 117300 }, { "epoch": 1.036312898496971, "grad_norm": 2.032209634780884, "learning_rate": 8.991034699673488e-05, "loss": 3.0363, "step": 117350 }, { "epoch": 1.0367544463872551, "grad_norm": 1.5942469835281372, "learning_rate": 8.990197462859952e-05, "loss": 3.5947, "step": 117400 }, { "epoch": 1.0371959942775393, "grad_norm": 3.661116361618042, "learning_rate": 8.989359917837498e-05, "loss": 3.1945, "step": 117450 }, { "epoch": 1.0376375421678234, "grad_norm": 1.773616075515747, "learning_rate": 8.988522064670822e-05, "loss": 2.7827, "step": 117500 }, { "epoch": 1.0380790900581076, "grad_norm": 7.883002758026123, "learning_rate": 8.987683903424636e-05, "loss": 3.0637, "step": 117550 }, { "epoch": 1.0385206379483918, 
"grad_norm": 1.7299555540084839, "learning_rate": 8.986845434163682e-05, "loss": 3.4542, "step": 117600 }, { "epoch": 1.0389621858386762, "grad_norm": 1.8580836057662964, "learning_rate": 8.986006656952727e-05, "loss": 3.1663, "step": 117650 }, { "epoch": 1.0394037337289603, "grad_norm": 1.1546132564544678, "learning_rate": 8.985167571856556e-05, "loss": 3.1597, "step": 117700 }, { "epoch": 1.0398452816192445, "grad_norm": 4.701486110687256, "learning_rate": 8.984328178939985e-05, "loss": 3.4678, "step": 117750 }, { "epoch": 1.0402868295095287, "grad_norm": 1.7120305299758911, "learning_rate": 8.98348847826785e-05, "loss": 3.0328, "step": 117800 }, { "epoch": 1.0407283773998128, "grad_norm": 2.6418991088867188, "learning_rate": 8.982648469905006e-05, "loss": 3.137, "step": 117850 }, { "epoch": 1.041169925290097, "grad_norm": 1.3650034666061401, "learning_rate": 8.981808153916341e-05, "loss": 2.8192, "step": 117900 }, { "epoch": 1.0416114731803812, "grad_norm": 0.9207521677017212, "learning_rate": 8.980967530366762e-05, "loss": 3.3502, "step": 117950 }, { "epoch": 1.0420530210706653, "grad_norm": 4.987005233764648, "learning_rate": 8.980126599321197e-05, "loss": 3.1026, "step": 118000 }, { "epoch": 1.0424945689609495, "grad_norm": 5.551382064819336, "learning_rate": 8.979285360844602e-05, "loss": 3.0126, "step": 118050 }, { "epoch": 1.0429361168512337, "grad_norm": 3.7902963161468506, "learning_rate": 8.97844381500196e-05, "loss": 3.6057, "step": 118100 }, { "epoch": 1.0433776647415178, "grad_norm": 2.5070292949676514, "learning_rate": 8.977601961858267e-05, "loss": 3.2931, "step": 118150 }, { "epoch": 1.043819212631802, "grad_norm": 1.3204067945480347, "learning_rate": 8.97675980147855e-05, "loss": 3.6482, "step": 118200 }, { "epoch": 1.0442607605220862, "grad_norm": 1.2875250577926636, "learning_rate": 8.975917333927862e-05, "loss": 2.8817, "step": 118250 }, { "epoch": 1.0447023084123703, "grad_norm": 3.1866626739501953, "learning_rate": 8.975074559271275e-05, 
"loss": 3.4228, "step": 118300 }, { "epoch": 1.0451438563026545, "grad_norm": 2.342751979827881, "learning_rate": 8.974231477573885e-05, "loss": 3.4827, "step": 118350 }, { "epoch": 1.0455854041929387, "grad_norm": 4.2284979820251465, "learning_rate": 8.973388088900814e-05, "loss": 2.6648, "step": 118400 }, { "epoch": 1.046026952083223, "grad_norm": 1.4045745134353638, "learning_rate": 8.972544393317208e-05, "loss": 2.9577, "step": 118450 }, { "epoch": 1.0464684999735072, "grad_norm": 1.3765054941177368, "learning_rate": 8.971700390888233e-05, "loss": 3.0889, "step": 118500 }, { "epoch": 1.0469100478637914, "grad_norm": 1.7757630348205566, "learning_rate": 8.970856081679081e-05, "loss": 3.2674, "step": 118550 }, { "epoch": 1.0473515957540755, "grad_norm": 2.229665517807007, "learning_rate": 8.970011465754969e-05, "loss": 3.1471, "step": 118600 }, { "epoch": 1.0477931436443597, "grad_norm": 2.3084306716918945, "learning_rate": 8.969166543181136e-05, "loss": 3.3577, "step": 118650 }, { "epoch": 1.0482346915346439, "grad_norm": 1.667510747909546, "learning_rate": 8.968321314022845e-05, "loss": 2.8539, "step": 118700 }, { "epoch": 1.048676239424928, "grad_norm": 2.1955771446228027, "learning_rate": 8.967475778345385e-05, "loss": 3.0149, "step": 118750 }, { "epoch": 1.0491177873152122, "grad_norm": 3.0574069023132324, "learning_rate": 8.966629936214062e-05, "loss": 3.5799, "step": 118800 }, { "epoch": 1.0495593352054964, "grad_norm": 3.2477333545684814, "learning_rate": 8.965783787694212e-05, "loss": 3.618, "step": 118850 }, { "epoch": 1.0500008830957805, "grad_norm": 1.1270619630813599, "learning_rate": 8.964937332851193e-05, "loss": 3.346, "step": 118900 }, { "epoch": 1.0504424309860647, "grad_norm": 3.402944803237915, "learning_rate": 8.964090571750389e-05, "loss": 3.252, "step": 118950 }, { "epoch": 1.0508839788763489, "grad_norm": 3.217515230178833, "learning_rate": 8.963243504457202e-05, "loss": 2.9268, "step": 119000 }, { "epoch": 1.051325526766633, "grad_norm": 
1.877545714378357, "learning_rate": 8.962396131037061e-05, "loss": 3.0917, "step": 119050 }, { "epoch": 1.0517670746569172, "grad_norm": 0.7133153676986694, "learning_rate": 8.96154845155542e-05, "loss": 3.386, "step": 119100 }, { "epoch": 1.0522086225472014, "grad_norm": 1.9354885816574097, "learning_rate": 8.960700466077751e-05, "loss": 2.8361, "step": 119150 }, { "epoch": 1.0526501704374855, "grad_norm": 1.1146783828735352, "learning_rate": 8.95985217466956e-05, "loss": 3.2829, "step": 119200 }, { "epoch": 1.05309171832777, "grad_norm": 1.3031646013259888, "learning_rate": 8.959003577396367e-05, "loss": 3.1304, "step": 119250 }, { "epoch": 1.053533266218054, "grad_norm": 2.9102251529693604, "learning_rate": 8.958154674323717e-05, "loss": 3.3583, "step": 119300 }, { "epoch": 1.0539748141083383, "grad_norm": 1.9654988050460815, "learning_rate": 8.957305465517185e-05, "loss": 3.1736, "step": 119350 }, { "epoch": 1.0544163619986224, "grad_norm": 4.362683296203613, "learning_rate": 8.956455951042361e-05, "loss": 2.8865, "step": 119400 }, { "epoch": 1.0548579098889066, "grad_norm": 2.9880433082580566, "learning_rate": 8.955606130964865e-05, "loss": 3.3161, "step": 119450 }, { "epoch": 1.0552994577791908, "grad_norm": 2.5000228881835938, "learning_rate": 8.954756005350339e-05, "loss": 2.7501, "step": 119500 }, { "epoch": 1.055741005669475, "grad_norm": 3.525005578994751, "learning_rate": 8.953905574264445e-05, "loss": 3.082, "step": 119550 }, { "epoch": 1.056182553559759, "grad_norm": 3.375227928161621, "learning_rate": 8.953054837772874e-05, "loss": 3.4092, "step": 119600 }, { "epoch": 1.0566241014500433, "grad_norm": 7.511069297790527, "learning_rate": 8.952203795941339e-05, "loss": 3.2211, "step": 119650 }, { "epoch": 1.0570656493403274, "grad_norm": 1.7979106903076172, "learning_rate": 8.951369478768952e-05, "loss": 2.9699, "step": 119700 }, { "epoch": 1.0575071972306116, "grad_norm": 1.3295596837997437, "learning_rate": 8.95051783255824e-05, "loss": 2.8737, 
"step": 119750 }, { "epoch": 1.0579487451208958, "grad_norm": 1.0681979656219482, "learning_rate": 8.949682923220605e-05, "loss": 3.1526, "step": 119800 }, { "epoch": 1.05839029301118, "grad_norm": 8.75452709197998, "learning_rate": 8.948830672888615e-05, "loss": 3.0168, "step": 119850 }, { "epoch": 1.058831840901464, "grad_norm": 2.01828670501709, "learning_rate": 8.947978117542938e-05, "loss": 2.943, "step": 119900 }, { "epoch": 1.0592733887917483, "grad_norm": 2.2010951042175293, "learning_rate": 8.947125257249429e-05, "loss": 3.1595, "step": 119950 }, { "epoch": 1.0597149366820324, "grad_norm": 1.8590667247772217, "learning_rate": 8.946272092073967e-05, "loss": 3.2562, "step": 120000 }, { "epoch": 1.0597149366820324, "eval_asr_loss": 0.9155421388644238, "eval_loss": 2.9473683834075928, "eval_runtime": 20.7175, "eval_samples_per_second": 37.07, "eval_steps_per_second": 9.268, "eval_tts_loss": 5.970084495848347, "step": 120000 }, { "epoch": 1.0601564845723168, "grad_norm": 1.69383704662323, "learning_rate": 8.945418622082446e-05, "loss": 3.316, "step": 120050 }, { "epoch": 1.060598032462601, "grad_norm": 1.9276316165924072, "learning_rate": 8.944564847340796e-05, "loss": 3.0642, "step": 120100 }, { "epoch": 1.0610395803528851, "grad_norm": 4.82901668548584, "learning_rate": 8.943710767914958e-05, "loss": 2.7604, "step": 120150 }, { "epoch": 1.0614811282431693, "grad_norm": 0.9138553142547607, "learning_rate": 8.942856383870905e-05, "loss": 3.1058, "step": 120200 }, { "epoch": 1.0619226761334535, "grad_norm": 1.6439554691314697, "learning_rate": 8.942001695274632e-05, "loss": 3.7622, "step": 120250 }, { "epoch": 1.0623642240237376, "grad_norm": 3.0835959911346436, "learning_rate": 8.941146702192154e-05, "loss": 3.4381, "step": 120300 }, { "epoch": 1.0628057719140218, "grad_norm": 1.632073163986206, "learning_rate": 8.940291404689513e-05, "loss": 2.7266, "step": 120350 }, { "epoch": 1.063247319804306, "grad_norm": 1.424759864807129, "learning_rate": 
8.939435802832776e-05, "loss": 3.1694, "step": 120400 }, { "epoch": 1.0636888676945901, "grad_norm": 1.0503005981445312, "learning_rate": 8.938579896688027e-05, "loss": 3.2015, "step": 120450 }, { "epoch": 1.0641304155848743, "grad_norm": 3.014944314956665, "learning_rate": 8.93772368632138e-05, "loss": 3.1513, "step": 120500 }, { "epoch": 1.0645719634751585, "grad_norm": 4.032464981079102, "learning_rate": 8.936867171798968e-05, "loss": 3.2884, "step": 120550 }, { "epoch": 1.0650135113654426, "grad_norm": 1.6288483142852783, "learning_rate": 8.93601035318695e-05, "loss": 3.1507, "step": 120600 }, { "epoch": 1.0654550592557268, "grad_norm": 2.872183322906494, "learning_rate": 8.935153230551512e-05, "loss": 3.4061, "step": 120650 }, { "epoch": 1.065896607146011, "grad_norm": 0.9016225337982178, "learning_rate": 8.934295803958854e-05, "loss": 3.262, "step": 120700 }, { "epoch": 1.0663381550362951, "grad_norm": 3.425549030303955, "learning_rate": 8.933438073475206e-05, "loss": 3.2199, "step": 120750 }, { "epoch": 1.0667797029265795, "grad_norm": 1.639690637588501, "learning_rate": 8.932580039166823e-05, "loss": 3.2348, "step": 120800 }, { "epoch": 1.0672212508168637, "grad_norm": 1.3123271465301514, "learning_rate": 8.931721701099977e-05, "loss": 2.9831, "step": 120850 }, { "epoch": 1.0676627987071479, "grad_norm": 2.0268373489379883, "learning_rate": 8.930863059340973e-05, "loss": 3.5563, "step": 120900 }, { "epoch": 1.068104346597432, "grad_norm": 2.0526180267333984, "learning_rate": 8.930004113956127e-05, "loss": 3.1924, "step": 120950 }, { "epoch": 1.0685458944877162, "grad_norm": 1.7065848112106323, "learning_rate": 8.929144865011791e-05, "loss": 3.0529, "step": 121000 }, { "epoch": 1.0689874423780004, "grad_norm": 2.654390335083008, "learning_rate": 8.92828531257433e-05, "loss": 3.4986, "step": 121050 }, { "epoch": 1.0694289902682845, "grad_norm": 2.723402738571167, "learning_rate": 8.927425456710141e-05, "loss": 3.3786, "step": 121100 }, { "epoch": 
1.0698705381585687, "grad_norm": 1.8961224555969238, "learning_rate": 8.92656529748564e-05, "loss": 3.4482, "step": 121150 }, { "epoch": 1.0703120860488529, "grad_norm": 2.6403260231018066, "learning_rate": 8.925704834967266e-05, "loss": 3.0291, "step": 121200 }, { "epoch": 1.070753633939137, "grad_norm": 2.748976469039917, "learning_rate": 8.924844069221481e-05, "loss": 2.7512, "step": 121250 }, { "epoch": 1.0711951818294212, "grad_norm": 3.6677534580230713, "learning_rate": 8.923983000314774e-05, "loss": 3.4528, "step": 121300 }, { "epoch": 1.0716367297197054, "grad_norm": 1.444061517715454, "learning_rate": 8.923121628313654e-05, "loss": 3.1281, "step": 121350 }, { "epoch": 1.0720782776099895, "grad_norm": 3.1170246601104736, "learning_rate": 8.922259953284658e-05, "loss": 3.3235, "step": 121400 }, { "epoch": 1.0725198255002737, "grad_norm": 2.0666680335998535, "learning_rate": 8.921397975294337e-05, "loss": 3.0791, "step": 121450 }, { "epoch": 1.0729613733905579, "grad_norm": 0.9159076809883118, "learning_rate": 8.920535694409276e-05, "loss": 3.1767, "step": 121500 }, { "epoch": 1.073402921280842, "grad_norm": 1.4360718727111816, "learning_rate": 8.919673110696078e-05, "loss": 3.1677, "step": 121550 }, { "epoch": 1.0738444691711262, "grad_norm": 6.167787551879883, "learning_rate": 8.91881022422137e-05, "loss": 2.988, "step": 121600 }, { "epoch": 1.0742860170614106, "grad_norm": 1.9026552438735962, "learning_rate": 8.917947035051804e-05, "loss": 3.4476, "step": 121650 }, { "epoch": 1.0747275649516947, "grad_norm": 2.0857155323028564, "learning_rate": 8.91708354325405e-05, "loss": 3.2904, "step": 121700 }, { "epoch": 1.075169112841979, "grad_norm": 2.137205123901367, "learning_rate": 8.916219748894811e-05, "loss": 3.1856, "step": 121750 }, { "epoch": 1.075610660732263, "grad_norm": 1.6799383163452148, "learning_rate": 8.915355652040804e-05, "loss": 3.3108, "step": 121800 }, { "epoch": 1.0760522086225472, "grad_norm": 2.878725290298462, "learning_rate": 
8.914491252758773e-05, "loss": 3.303, "step": 121850 }, { "epoch": 1.0764937565128314, "grad_norm": 1.8924479484558105, "learning_rate": 8.913626551115489e-05, "loss": 2.9533, "step": 121900 }, { "epoch": 1.0769353044031156, "grad_norm": 3.5954272747039795, "learning_rate": 8.912761547177737e-05, "loss": 3.0536, "step": 121950 }, { "epoch": 1.0773768522933997, "grad_norm": 3.484809160232544, "learning_rate": 8.911896241012337e-05, "loss": 2.6292, "step": 122000 }, { "epoch": 1.077818400183684, "grad_norm": 2.250790596008301, "learning_rate": 8.91104794781339e-05, "loss": 3.3854, "step": 122050 }, { "epoch": 1.078259948073968, "grad_norm": 1.3487905263900757, "learning_rate": 8.910182043434451e-05, "loss": 3.3714, "step": 122100 }, { "epoch": 1.0787014959642522, "grad_norm": 1.1757347583770752, "learning_rate": 8.909315837027104e-05, "loss": 3.3735, "step": 122150 }, { "epoch": 1.0791430438545364, "grad_norm": 1.4942172765731812, "learning_rate": 8.908449328658259e-05, "loss": 3.4967, "step": 122200 }, { "epoch": 1.0795845917448206, "grad_norm": 1.2587921619415283, "learning_rate": 8.907582518394846e-05, "loss": 3.3902, "step": 122250 }, { "epoch": 1.0800261396351047, "grad_norm": 5.078591823577881, "learning_rate": 8.90671540630382e-05, "loss": 2.8449, "step": 122300 }, { "epoch": 1.080467687525389, "grad_norm": 1.2880104780197144, "learning_rate": 8.905847992452154e-05, "loss": 3.0292, "step": 122350 }, { "epoch": 1.0809092354156733, "grad_norm": 2.9822380542755127, "learning_rate": 8.904980276906854e-05, "loss": 3.5007, "step": 122400 }, { "epoch": 1.0813507833059575, "grad_norm": 1.5988084077835083, "learning_rate": 8.90411225973494e-05, "loss": 3.1964, "step": 122450 }, { "epoch": 1.0817923311962416, "grad_norm": 2.4721879959106445, "learning_rate": 8.90324394100346e-05, "loss": 3.3969, "step": 122500 }, { "epoch": 1.0822338790865258, "grad_norm": 2.2065553665161133, "learning_rate": 8.902375320779483e-05, "loss": 3.1513, "step": 122550 }, { "epoch": 
1.08267542697681, "grad_norm": 4.793309688568115, "learning_rate": 8.901506399130104e-05, "loss": 3.1388, "step": 122600 }, { "epoch": 1.0831169748670941, "grad_norm": 2.873149871826172, "learning_rate": 8.900637176122439e-05, "loss": 3.1295, "step": 122650 }, { "epoch": 1.0835585227573783, "grad_norm": 1.5303065776824951, "learning_rate": 8.89976765182363e-05, "loss": 3.1349, "step": 122700 }, { "epoch": 1.0840000706476625, "grad_norm": 2.5381152629852295, "learning_rate": 8.89889782630084e-05, "loss": 2.9839, "step": 122750 }, { "epoch": 1.0844416185379466, "grad_norm": 1.606925368309021, "learning_rate": 8.898027699621253e-05, "loss": 2.9266, "step": 122800 }, { "epoch": 1.0848831664282308, "grad_norm": 2.0156993865966797, "learning_rate": 8.897157271852079e-05, "loss": 3.6531, "step": 122850 }, { "epoch": 1.085324714318515, "grad_norm": 2.879831552505493, "learning_rate": 8.896286543060555e-05, "loss": 3.1662, "step": 122900 }, { "epoch": 1.0857662622087991, "grad_norm": 1.4810205698013306, "learning_rate": 8.895415513313934e-05, "loss": 3.2344, "step": 122950 }, { "epoch": 1.0862078100990833, "grad_norm": 3.132847547531128, "learning_rate": 8.894544182679497e-05, "loss": 3.4795, "step": 123000 }, { "epoch": 1.0862078100990833, "eval_asr_loss": 0.9171379533371528, "eval_loss": 2.9432666301727295, "eval_runtime": 20.3376, "eval_samples_per_second": 37.763, "eval_steps_per_second": 9.441, "eval_tts_loss": 6.0161064579099435, "step": 123000 }, { "epoch": 1.0866493579893675, "grad_norm": 3.2920241355895996, "learning_rate": 8.893672551224547e-05, "loss": 3.0528, "step": 123050 }, { "epoch": 1.0870909058796516, "grad_norm": 3.9480741024017334, "learning_rate": 8.892800619016409e-05, "loss": 2.9215, "step": 123100 }, { "epoch": 1.0875324537699358, "grad_norm": 1.113074541091919, "learning_rate": 8.891928386122434e-05, "loss": 2.6988, "step": 123150 }, { "epoch": 1.08797400166022, "grad_norm": 3.499051094055176, "learning_rate": 8.891055852609992e-05, "loss": 3.4827, 
"step": 123200 }, { "epoch": 1.0884155495505043, "grad_norm": 3.23872971534729, "learning_rate": 8.890183018546483e-05, "loss": 3.1988, "step": 123250 }, { "epoch": 1.0888570974407885, "grad_norm": 2.035611391067505, "learning_rate": 8.889309883999321e-05, "loss": 3.414, "step": 123300 }, { "epoch": 1.0892986453310727, "grad_norm": 2.9164116382598877, "learning_rate": 8.888436449035955e-05, "loss": 2.8317, "step": 123350 }, { "epoch": 1.0897401932213568, "grad_norm": 2.3770909309387207, "learning_rate": 8.887562713723843e-05, "loss": 2.7964, "step": 123400 }, { "epoch": 1.090181741111641, "grad_norm": 1.5850526094436646, "learning_rate": 8.886688678130477e-05, "loss": 2.9763, "step": 123450 }, { "epoch": 1.0906232890019252, "grad_norm": 3.0644419193267822, "learning_rate": 8.885814342323369e-05, "loss": 3.0505, "step": 123500 }, { "epoch": 1.0910648368922093, "grad_norm": 4.172904968261719, "learning_rate": 8.884939706370055e-05, "loss": 3.088, "step": 123550 }, { "epoch": 1.0915063847824935, "grad_norm": 2.7594523429870605, "learning_rate": 8.884064770338092e-05, "loss": 3.414, "step": 123600 }, { "epoch": 1.0919479326727777, "grad_norm": 2.1696181297302246, "learning_rate": 8.88318953429506e-05, "loss": 3.0237, "step": 123650 }, { "epoch": 1.0923894805630618, "grad_norm": 2.1066668033599854, "learning_rate": 8.882313998308564e-05, "loss": 3.3296, "step": 123700 }, { "epoch": 1.092831028453346, "grad_norm": 2.821223020553589, "learning_rate": 8.881438162446236e-05, "loss": 2.9404, "step": 123750 }, { "epoch": 1.0932725763436302, "grad_norm": 1.817001223564148, "learning_rate": 8.880562026775721e-05, "loss": 2.9152, "step": 123800 }, { "epoch": 1.0937141242339143, "grad_norm": 1.3523433208465576, "learning_rate": 8.879685591364697e-05, "loss": 3.2203, "step": 123850 }, { "epoch": 1.0941556721241985, "grad_norm": 3.8314602375030518, "learning_rate": 8.87880885628086e-05, "loss": 2.6743, "step": 123900 }, { "epoch": 1.0945972200144827, "grad_norm": 
5.9612321853637695, "learning_rate": 8.87793182159193e-05, "loss": 3.0251, "step": 123950 }, { "epoch": 1.095038767904767, "grad_norm": 9.792705535888672, "learning_rate": 8.87705448736565e-05, "loss": 3.019, "step": 124000 }, { "epoch": 1.0954803157950512, "grad_norm": 2.604074001312256, "learning_rate": 8.876176853669786e-05, "loss": 3.308, "step": 124050 }, { "epoch": 1.0959218636853354, "grad_norm": 5.136208534240723, "learning_rate": 8.87529892057213e-05, "loss": 3.1035, "step": 124100 }, { "epoch": 1.0963634115756196, "grad_norm": 3.2921555042266846, "learning_rate": 8.874438255722164e-05, "loss": 3.6894, "step": 124150 }, { "epoch": 1.0968049594659037, "grad_norm": 2.1269774436950684, "learning_rate": 8.87355973000904e-05, "loss": 3.0221, "step": 124200 }, { "epoch": 1.097246507356188, "grad_norm": 2.0114710330963135, "learning_rate": 8.872680905096278e-05, "loss": 3.4012, "step": 124250 }, { "epoch": 1.097688055246472, "grad_norm": 4.107577323913574, "learning_rate": 8.871801781051753e-05, "loss": 3.2551, "step": 124300 }, { "epoch": 1.0981296031367562, "grad_norm": 2.8425934314727783, "learning_rate": 8.870922357943373e-05, "loss": 3.2815, "step": 124350 }, { "epoch": 1.0985711510270404, "grad_norm": 2.750260829925537, "learning_rate": 8.870042635839065e-05, "loss": 2.9947, "step": 124400 }, { "epoch": 1.0990126989173246, "grad_norm": 2.1644296646118164, "learning_rate": 8.869162614806781e-05, "loss": 3.4007, "step": 124450 }, { "epoch": 1.0994542468076087, "grad_norm": 4.535878658294678, "learning_rate": 8.868282294914493e-05, "loss": 2.8193, "step": 124500 }, { "epoch": 1.099895794697893, "grad_norm": 0.610397458076477, "learning_rate": 8.867401676230202e-05, "loss": 3.3091, "step": 124550 }, { "epoch": 1.100337342588177, "grad_norm": 1.3316540718078613, "learning_rate": 8.866520758821926e-05, "loss": 2.9345, "step": 124600 }, { "epoch": 1.1007788904784612, "grad_norm": 2.5452449321746826, "learning_rate": 8.865639542757706e-05, "loss": 3.0348, "step": 
124650 }, { "epoch": 1.1012204383687454, "grad_norm": 1.1717711687088013, "learning_rate": 8.864758028105614e-05, "loss": 3.6851, "step": 124700 }, { "epoch": 1.1016619862590296, "grad_norm": 1.1457619667053223, "learning_rate": 8.863876214933735e-05, "loss": 2.7013, "step": 124750 }, { "epoch": 1.1021035341493137, "grad_norm": 3.3232429027557373, "learning_rate": 8.862994103310183e-05, "loss": 3.4489, "step": 124800 }, { "epoch": 1.1025450820395981, "grad_norm": 2.6263210773468018, "learning_rate": 8.862111693303094e-05, "loss": 3.5465, "step": 124850 }, { "epoch": 1.1029866299298823, "grad_norm": 2.42629337310791, "learning_rate": 8.861228984980626e-05, "loss": 3.1175, "step": 124900 }, { "epoch": 1.1034281778201664, "grad_norm": 2.722221612930298, "learning_rate": 8.860345978410962e-05, "loss": 3.1893, "step": 124950 }, { "epoch": 1.1038697257104506, "grad_norm": 2.3193819522857666, "learning_rate": 8.859462673662305e-05, "loss": 3.6755, "step": 125000 }, { "epoch": 1.1043112736007348, "grad_norm": 2.0361616611480713, "learning_rate": 8.858579070802883e-05, "loss": 3.7692, "step": 125050 }, { "epoch": 1.104752821491019, "grad_norm": 2.403743028640747, "learning_rate": 8.85769516990095e-05, "loss": 3.03, "step": 125100 }, { "epoch": 1.105194369381303, "grad_norm": 6.095032691955566, "learning_rate": 8.856810971024776e-05, "loss": 3.4562, "step": 125150 }, { "epoch": 1.1056359172715873, "grad_norm": 3.28342866897583, "learning_rate": 8.85592647424266e-05, "loss": 3.3188, "step": 125200 }, { "epoch": 1.1060774651618714, "grad_norm": 2.5184452533721924, "learning_rate": 8.85504167962292e-05, "loss": 3.3513, "step": 125250 }, { "epoch": 1.1065190130521556, "grad_norm": 1.7279000282287598, "learning_rate": 8.854156587233902e-05, "loss": 3.5462, "step": 125300 }, { "epoch": 1.1069605609424398, "grad_norm": 2.6725499629974365, "learning_rate": 8.853271197143969e-05, "loss": 2.5798, "step": 125350 }, { "epoch": 1.107402108832724, "grad_norm": 1.1816620826721191, 
"learning_rate": 8.85238550942151e-05, "loss": 3.5221, "step": 125400 }, { "epoch": 1.107843656723008, "grad_norm": 1.639984369277954, "learning_rate": 8.851499524134939e-05, "loss": 2.9796, "step": 125450 }, { "epoch": 1.1082852046132923, "grad_norm": 5.580627918243408, "learning_rate": 8.850613241352688e-05, "loss": 3.2337, "step": 125500 }, { "epoch": 1.1087267525035764, "grad_norm": 4.015604019165039, "learning_rate": 8.849726661143217e-05, "loss": 3.013, "step": 125550 }, { "epoch": 1.1091683003938608, "grad_norm": 5.388280391693115, "learning_rate": 8.848839783575007e-05, "loss": 3.2899, "step": 125600 }, { "epoch": 1.109609848284145, "grad_norm": 1.1181635856628418, "learning_rate": 8.847952608716559e-05, "loss": 3.2404, "step": 125650 }, { "epoch": 1.1100513961744292, "grad_norm": 1.2967584133148193, "learning_rate": 8.847065136636403e-05, "loss": 3.1253, "step": 125700 }, { "epoch": 1.1104929440647133, "grad_norm": 2.1886446475982666, "learning_rate": 8.846177367403088e-05, "loss": 3.3288, "step": 125750 }, { "epoch": 1.1109344919549975, "grad_norm": 2.3884706497192383, "learning_rate": 8.845289301085184e-05, "loss": 3.7135, "step": 125800 }, { "epoch": 1.1113760398452817, "grad_norm": 1.3103140592575073, "learning_rate": 8.844400937751291e-05, "loss": 2.7412, "step": 125850 }, { "epoch": 1.1118175877355658, "grad_norm": 2.5914974212646484, "learning_rate": 8.843512277470023e-05, "loss": 2.9458, "step": 125900 }, { "epoch": 1.11225913562585, "grad_norm": 1.5716588497161865, "learning_rate": 8.842623320310023e-05, "loss": 3.558, "step": 125950 }, { "epoch": 1.1127006835161342, "grad_norm": 2.2623939514160156, "learning_rate": 8.841734066339959e-05, "loss": 3.2775, "step": 126000 }, { "epoch": 1.1127006835161342, "eval_asr_loss": 0.9239404375777175, "eval_loss": 2.9568049907684326, "eval_runtime": 20.3306, "eval_samples_per_second": 37.776, "eval_steps_per_second": 9.444, "eval_tts_loss": 6.011922956928332, "step": 126000 }, { "epoch": 1.1131422314064183, 
"grad_norm": 1.213431477546692, "learning_rate": 8.840844515628515e-05, "loss": 3.3045, "step": 126050 }, { "epoch": 1.1135837792967025, "grad_norm": 1.4413515329360962, "learning_rate": 8.839954668244399e-05, "loss": 3.3209, "step": 126100 }, { "epoch": 1.1140253271869867, "grad_norm": 0.6168299913406372, "learning_rate": 8.839064524256348e-05, "loss": 3.7805, "step": 126150 }, { "epoch": 1.1144668750772708, "grad_norm": 1.9081262350082397, "learning_rate": 8.838174083733117e-05, "loss": 3.1688, "step": 126200 }, { "epoch": 1.114908422967555, "grad_norm": 1.6664979457855225, "learning_rate": 8.837283346743485e-05, "loss": 3.2032, "step": 126250 }, { "epoch": 1.1153499708578392, "grad_norm": 5.024023532867432, "learning_rate": 8.836392313356255e-05, "loss": 3.2986, "step": 126300 }, { "epoch": 1.1157915187481233, "grad_norm": 1.7272241115570068, "learning_rate": 8.835500983640248e-05, "loss": 2.9631, "step": 126350 }, { "epoch": 1.1162330666384075, "grad_norm": 4.159533977508545, "learning_rate": 8.834609357664314e-05, "loss": 3.1184, "step": 126400 }, { "epoch": 1.1166746145286919, "grad_norm": 1.9124999046325684, "learning_rate": 8.833717435497325e-05, "loss": 3.4903, "step": 126450 }, { "epoch": 1.117116162418976, "grad_norm": 3.8171277046203613, "learning_rate": 8.832825217208174e-05, "loss": 2.9298, "step": 126500 }, { "epoch": 1.1175577103092602, "grad_norm": 3.396512508392334, "learning_rate": 8.831932702865774e-05, "loss": 3.398, "step": 126550 }, { "epoch": 1.1179992581995444, "grad_norm": 0.947153627872467, "learning_rate": 8.831039892539069e-05, "loss": 3.2457, "step": 126600 }, { "epoch": 1.1184408060898285, "grad_norm": 5.9848456382751465, "learning_rate": 8.830146786297016e-05, "loss": 2.9657, "step": 126650 }, { "epoch": 1.1188823539801127, "grad_norm": 1.9373352527618408, "learning_rate": 8.829253384208604e-05, "loss": 3.4747, "step": 126700 }, { "epoch": 1.1193239018703969, "grad_norm": 2.3222639560699463, "learning_rate": 8.828359686342839e-05, 
"loss": 3.1783, "step": 126750 }, { "epoch": 1.119765449760681, "grad_norm": 1.1654438972473145, "learning_rate": 8.82746569276875e-05, "loss": 3.1918, "step": 126800 }, { "epoch": 1.1202069976509652, "grad_norm": 0.7595392465591431, "learning_rate": 8.826571403555395e-05, "loss": 3.2987, "step": 126850 }, { "epoch": 1.1206485455412494, "grad_norm": 3.6976282596588135, "learning_rate": 8.825676818771846e-05, "loss": 3.1541, "step": 126900 }, { "epoch": 1.1210900934315335, "grad_norm": 4.80150842666626, "learning_rate": 8.824799838988361e-05, "loss": 2.8695, "step": 126950 }, { "epoch": 1.1215316413218177, "grad_norm": 2.4069314002990723, "learning_rate": 8.823904669179708e-05, "loss": 3.416, "step": 127000 }, { "epoch": 1.1219731892121019, "grad_norm": 3.3130557537078857, "learning_rate": 8.823009204006847e-05, "loss": 3.0622, "step": 127050 }, { "epoch": 1.122414737102386, "grad_norm": 4.023521900177002, "learning_rate": 8.822113443538942e-05, "loss": 3.5856, "step": 127100 }, { "epoch": 1.1228562849926702, "grad_norm": 2.4293367862701416, "learning_rate": 8.821217387845184e-05, "loss": 3.1485, "step": 127150 }, { "epoch": 1.1232978328829546, "grad_norm": 1.6036757230758667, "learning_rate": 8.820321036994787e-05, "loss": 3.402, "step": 127200 }, { "epoch": 1.1237393807732388, "grad_norm": 5.501082420349121, "learning_rate": 8.819424391056985e-05, "loss": 3.4117, "step": 127250 }, { "epoch": 1.124180928663523, "grad_norm": 0.864111065864563, "learning_rate": 8.818527450101035e-05, "loss": 2.8882, "step": 127300 }, { "epoch": 1.124622476553807, "grad_norm": 1.7957173585891724, "learning_rate": 8.817630214196222e-05, "loss": 3.1329, "step": 127350 }, { "epoch": 1.1250640244440913, "grad_norm": 2.793177604675293, "learning_rate": 8.816732683411846e-05, "loss": 3.602, "step": 127400 }, { "epoch": 1.1255055723343754, "grad_norm": 3.6395950317382812, "learning_rate": 8.815834857817234e-05, "loss": 3.6478, "step": 127450 }, { "epoch": 1.1259471202246596, "grad_norm": 
3.0564305782318115, "learning_rate": 8.814936737481739e-05, "loss": 3.1558, "step": 127500 }, { "epoch": 1.1263886681149438, "grad_norm": 2.4412167072296143, "learning_rate": 8.81403832247473e-05, "loss": 3.0928, "step": 127550 }, { "epoch": 1.126830216005228, "grad_norm": 0.747972309589386, "learning_rate": 8.813139612865601e-05, "loss": 3.1837, "step": 127600 }, { "epoch": 1.127271763895512, "grad_norm": 1.7288129329681396, "learning_rate": 8.812240608723774e-05, "loss": 3.0548, "step": 127650 }, { "epoch": 1.1277133117857963, "grad_norm": 2.70778226852417, "learning_rate": 8.811359298976078e-05, "loss": 3.1048, "step": 127700 }, { "epoch": 1.1281548596760804, "grad_norm": 2.5320427417755127, "learning_rate": 8.810459711864387e-05, "loss": 3.8118, "step": 127750 }, { "epoch": 1.1285964075663646, "grad_norm": 1.2388442754745483, "learning_rate": 8.809559830426997e-05, "loss": 3.3423, "step": 127800 }, { "epoch": 1.1290379554566488, "grad_norm": 1.5890963077545166, "learning_rate": 8.808659654733411e-05, "loss": 2.7988, "step": 127850 }, { "epoch": 1.129479503346933, "grad_norm": 3.5289838314056396, "learning_rate": 8.807759184853165e-05, "loss": 3.3222, "step": 127900 }, { "epoch": 1.129921051237217, "grad_norm": 2.6470303535461426, "learning_rate": 8.806858420855813e-05, "loss": 2.5924, "step": 127950 }, { "epoch": 1.1303625991275013, "grad_norm": 1.1539044380187988, "learning_rate": 8.805957362810926e-05, "loss": 3.5027, "step": 128000 }, { "epoch": 1.1308041470177856, "grad_norm": 2.7577285766601562, "learning_rate": 8.805056010788108e-05, "loss": 2.8623, "step": 128050 }, { "epoch": 1.1312456949080698, "grad_norm": 3.473392963409424, "learning_rate": 8.804154364856979e-05, "loss": 3.6265, "step": 128100 }, { "epoch": 1.131687242798354, "grad_norm": 1.1800062656402588, "learning_rate": 8.803252425087183e-05, "loss": 3.161, "step": 128150 }, { "epoch": 1.1321287906886381, "grad_norm": 3.8547286987304688, "learning_rate": 8.802350191548387e-05, "loss": 3.247, 
"step": 128200 }, { "epoch": 1.1325703385789223, "grad_norm": 2.5325534343719482, "learning_rate": 8.801447664310282e-05, "loss": 3.2594, "step": 128250 }, { "epoch": 1.1330118864692065, "grad_norm": 2.247164726257324, "learning_rate": 8.800544843442582e-05, "loss": 3.6423, "step": 128300 }, { "epoch": 1.1334534343594906, "grad_norm": 4.375690460205078, "learning_rate": 8.799641729015019e-05, "loss": 3.6558, "step": 128350 }, { "epoch": 1.1338949822497748, "grad_norm": 3.372756242752075, "learning_rate": 8.798738321097353e-05, "loss": 3.1197, "step": 128400 }, { "epoch": 1.134336530140059, "grad_norm": 3.348130464553833, "learning_rate": 8.797834619759363e-05, "loss": 2.9325, "step": 128450 }, { "epoch": 1.1347780780303431, "grad_norm": 0.7869385480880737, "learning_rate": 8.796930625070853e-05, "loss": 3.1624, "step": 128500 }, { "epoch": 1.1352196259206273, "grad_norm": 2.3363020420074463, "learning_rate": 8.796026337101653e-05, "loss": 3.1033, "step": 128550 }, { "epoch": 1.1356611738109115, "grad_norm": 1.4216840267181396, "learning_rate": 8.795121755921604e-05, "loss": 3.6407, "step": 128600 }, { "epoch": 1.1361027217011956, "grad_norm": 1.4757707118988037, "learning_rate": 8.794216881600583e-05, "loss": 3.5712, "step": 128650 }, { "epoch": 1.1365442695914798, "grad_norm": 1.487884283065796, "learning_rate": 8.793311714208481e-05, "loss": 3.1033, "step": 128700 }, { "epoch": 1.1369858174817642, "grad_norm": 3.3556032180786133, "learning_rate": 8.792406253815215e-05, "loss": 2.8425, "step": 128750 }, { "epoch": 1.1374273653720484, "grad_norm": 1.508814811706543, "learning_rate": 8.791500500490726e-05, "loss": 3.3935, "step": 128800 }, { "epoch": 1.1378689132623325, "grad_norm": 1.8587937355041504, "learning_rate": 8.790594454304974e-05, "loss": 3.4744, "step": 128850 }, { "epoch": 1.1383104611526167, "grad_norm": 0.7840842604637146, "learning_rate": 8.789688115327944e-05, "loss": 3.2839, "step": 128900 }, { "epoch": 1.1387520090429009, "grad_norm": 
3.428917646408081, "learning_rate": 8.78878148362964e-05, "loss": 3.228, "step": 128950 }, { "epoch": 1.139193556933185, "grad_norm": 1.454732894897461, "learning_rate": 8.787874559280096e-05, "loss": 2.5992, "step": 129000 }, { "epoch": 1.139193556933185, "eval_asr_loss": 0.9056011822879443, "eval_loss": 2.9360103607177734, "eval_runtime": 20.5894, "eval_samples_per_second": 37.301, "eval_steps_per_second": 9.325, "eval_tts_loss": 6.019627850743963, "step": 129000 }, { "epoch": 1.1396351048234692, "grad_norm": 3.9071409702301025, "learning_rate": 8.786967342349364e-05, "loss": 2.7655, "step": 129050 }, { "epoch": 1.1400766527137534, "grad_norm": 1.2048531770706177, "learning_rate": 8.786059832907516e-05, "loss": 2.8011, "step": 129100 }, { "epoch": 1.1405182006040375, "grad_norm": 1.1813719272613525, "learning_rate": 8.78515203102465e-05, "loss": 3.2859, "step": 129150 }, { "epoch": 1.1409597484943217, "grad_norm": 5.590451717376709, "learning_rate": 8.784243936770889e-05, "loss": 3.3888, "step": 129200 }, { "epoch": 1.1414012963846059, "grad_norm": 1.4032896757125854, "learning_rate": 8.783335550216372e-05, "loss": 3.2597, "step": 129250 }, { "epoch": 1.14184284427489, "grad_norm": 1.308907151222229, "learning_rate": 8.782426871431265e-05, "loss": 3.2236, "step": 129300 }, { "epoch": 1.1422843921651742, "grad_norm": 4.8072028160095215, "learning_rate": 8.781517900485757e-05, "loss": 2.996, "step": 129350 }, { "epoch": 1.1427259400554584, "grad_norm": 1.4444926977157593, "learning_rate": 8.780608637450056e-05, "loss": 2.6338, "step": 129400 }, { "epoch": 1.1431674879457425, "grad_norm": 4.736634731292725, "learning_rate": 8.7796990823944e-05, "loss": 3.0496, "step": 129450 }, { "epoch": 1.1436090358360267, "grad_norm": 1.504779577255249, "learning_rate": 8.778789235389037e-05, "loss": 3.0389, "step": 129500 }, { "epoch": 1.1440505837263109, "grad_norm": 3.4063234329223633, "learning_rate": 8.77787909650425e-05, "loss": 3.2908, "step": 129550 }, { "epoch": 
1.144492131616595, "grad_norm": 2.3659403324127197, "learning_rate": 8.776968665810339e-05, "loss": 3.567, "step": 129600 }, { "epoch": 1.1449336795068794, "grad_norm": 2.675527334213257, "learning_rate": 8.776057943377624e-05, "loss": 3.2116, "step": 129650 }, { "epoch": 1.1453752273971636, "grad_norm": 2.0578064918518066, "learning_rate": 8.775146929276457e-05, "loss": 3.227, "step": 129700 }, { "epoch": 1.1458167752874477, "grad_norm": 3.0254499912261963, "learning_rate": 8.774235623577199e-05, "loss": 2.9167, "step": 129750 }, { "epoch": 1.146258323177732, "grad_norm": 1.1867756843566895, "learning_rate": 8.773324026350245e-05, "loss": 3.1534, "step": 129800 }, { "epoch": 1.146699871068016, "grad_norm": 0.8575452566146851, "learning_rate": 8.772412137666005e-05, "loss": 3.04, "step": 129850 }, { "epoch": 1.1471414189583002, "grad_norm": 1.0062538385391235, "learning_rate": 8.771499957594917e-05, "loss": 3.2266, "step": 129900 }, { "epoch": 1.1475829668485844, "grad_norm": 2.8599894046783447, "learning_rate": 8.77058748620744e-05, "loss": 3.3753, "step": 129950 }, { "epoch": 1.1480245147388686, "grad_norm": 2.6259355545043945, "learning_rate": 8.769674723574052e-05, "loss": 3.4298, "step": 130000 }, { "epoch": 1.1484660626291527, "grad_norm": 0.4966769516468048, "learning_rate": 8.76876166976526e-05, "loss": 3.2482, "step": 130050 }, { "epoch": 1.148907610519437, "grad_norm": 4.237811088562012, "learning_rate": 8.767848324851584e-05, "loss": 3.8295, "step": 130100 }, { "epoch": 1.149349158409721, "grad_norm": 1.2462416887283325, "learning_rate": 8.766934688903577e-05, "loss": 2.9447, "step": 130150 }, { "epoch": 1.1497907063000052, "grad_norm": 4.463748455047607, "learning_rate": 8.766020761991808e-05, "loss": 3.1586, "step": 130200 }, { "epoch": 1.1502322541902894, "grad_norm": 1.7702147960662842, "learning_rate": 8.76510654418687e-05, "loss": 3.2017, "step": 130250 }, { "epoch": 1.1506738020805736, "grad_norm": 1.8763153553009033, "learning_rate": 
8.764192035559378e-05, "loss": 3.2077, "step": 130300 }, { "epoch": 1.151115349970858, "grad_norm": 1.445235013961792, "learning_rate": 8.763277236179971e-05, "loss": 2.9795, "step": 130350 }, { "epoch": 1.1515568978611421, "grad_norm": 3.101717233657837, "learning_rate": 8.762362146119309e-05, "loss": 3.5737, "step": 130400 }, { "epoch": 1.1519984457514263, "grad_norm": 2.088386297225952, "learning_rate": 8.761446765448076e-05, "loss": 3.071, "step": 130450 }, { "epoch": 1.1524399936417105, "grad_norm": 1.0259060859680176, "learning_rate": 8.760531094236975e-05, "loss": 3.0661, "step": 130500 }, { "epoch": 1.1528815415319946, "grad_norm": 1.4046655893325806, "learning_rate": 8.759615132556736e-05, "loss": 2.9523, "step": 130550 }, { "epoch": 1.1533230894222788, "grad_norm": 2.649925947189331, "learning_rate": 8.75869888047811e-05, "loss": 3.2149, "step": 130600 }, { "epoch": 1.153764637312563, "grad_norm": 3.931152582168579, "learning_rate": 8.757782338071866e-05, "loss": 3.5996, "step": 130650 }, { "epoch": 1.1542061852028471, "grad_norm": 0.7651322484016418, "learning_rate": 8.756865505408802e-05, "loss": 2.8727, "step": 130700 }, { "epoch": 1.1546477330931313, "grad_norm": 3.1403989791870117, "learning_rate": 8.755948382559735e-05, "loss": 3.5253, "step": 130750 }, { "epoch": 1.1550892809834155, "grad_norm": 0.9329702854156494, "learning_rate": 8.755030969595505e-05, "loss": 2.9106, "step": 130800 }, { "epoch": 1.1555308288736996, "grad_norm": 1.3300598859786987, "learning_rate": 8.754113266586977e-05, "loss": 3.4129, "step": 130850 }, { "epoch": 1.1559723767639838, "grad_norm": 4.641824722290039, "learning_rate": 8.75319527360503e-05, "loss": 3.3374, "step": 130900 }, { "epoch": 1.156413924654268, "grad_norm": 2.1139044761657715, "learning_rate": 8.752276990720576e-05, "loss": 3.4172, "step": 130950 }, { "epoch": 1.1568554725445521, "grad_norm": 1.589998483657837, "learning_rate": 8.751358418004542e-05, "loss": 2.8262, "step": 131000 }, { "epoch": 
1.1572970204348363, "grad_norm": 1.7650665044784546, "learning_rate": 8.75043955552788e-05, "loss": 3.6119, "step": 131050 }, { "epoch": 1.1577385683251205, "grad_norm": 4.2411603927612305, "learning_rate": 8.749520403361566e-05, "loss": 3.1808, "step": 131100 }, { "epoch": 1.1581801162154046, "grad_norm": 1.8814787864685059, "learning_rate": 8.748600961576596e-05, "loss": 2.9927, "step": 131150 }, { "epoch": 1.1586216641056888, "grad_norm": 2.2863402366638184, "learning_rate": 8.74768123024399e-05, "loss": 3.0464, "step": 131200 }, { "epoch": 1.1590632119959732, "grad_norm": 1.000219464302063, "learning_rate": 8.746761209434786e-05, "loss": 3.3005, "step": 131250 }, { "epoch": 1.1595047598862573, "grad_norm": 3.1498961448669434, "learning_rate": 8.745840899220051e-05, "loss": 2.8365, "step": 131300 }, { "epoch": 1.1599463077765415, "grad_norm": 6.975586891174316, "learning_rate": 8.744920299670871e-05, "loss": 3.2125, "step": 131350 }, { "epoch": 1.1603878556668257, "grad_norm": 1.3487223386764526, "learning_rate": 8.743999410858354e-05, "loss": 3.1216, "step": 131400 }, { "epoch": 1.1608294035571098, "grad_norm": 2.6848034858703613, "learning_rate": 8.74307823285363e-05, "loss": 2.8299, "step": 131450 }, { "epoch": 1.161270951447394, "grad_norm": 3.3084640502929688, "learning_rate": 8.742156765727853e-05, "loss": 3.5291, "step": 131500 }, { "epoch": 1.1617124993376782, "grad_norm": 3.785712480545044, "learning_rate": 8.741235009552197e-05, "loss": 3.1219, "step": 131550 }, { "epoch": 1.1621540472279623, "grad_norm": 2.4641273021698, "learning_rate": 8.740312964397863e-05, "loss": 3.6626, "step": 131600 }, { "epoch": 1.1625955951182465, "grad_norm": 2.6847565174102783, "learning_rate": 8.73939063033607e-05, "loss": 3.425, "step": 131650 }, { "epoch": 1.1630371430085307, "grad_norm": 3.19730281829834, "learning_rate": 8.738468007438059e-05, "loss": 3.5178, "step": 131700 }, { "epoch": 1.1634786908988148, "grad_norm": 1.92465341091156, "learning_rate": 
8.737545095775094e-05, "loss": 3.1112, "step": 131750 }, { "epoch": 1.163920238789099, "grad_norm": 3.2829654216766357, "learning_rate": 8.736621895418467e-05, "loss": 3.4692, "step": 131800 }, { "epoch": 1.1643617866793832, "grad_norm": 3.4545881748199463, "learning_rate": 8.7357168790471e-05, "loss": 2.9955, "step": 131850 }, { "epoch": 1.1648033345696673, "grad_norm": 1.9514338970184326, "learning_rate": 8.734793107287413e-05, "loss": 2.8255, "step": 131900 }, { "epoch": 1.1652448824599517, "grad_norm": 2.7962305545806885, "learning_rate": 8.73386904704663e-05, "loss": 3.2173, "step": 131950 }, { "epoch": 1.165686430350236, "grad_norm": 4.475177764892578, "learning_rate": 8.732963188195087e-05, "loss": 3.1302, "step": 132000 }, { "epoch": 1.165686430350236, "eval_asr_loss": 0.9138820392081759, "eval_loss": 2.9405391216278076, "eval_runtime": 20.3253, "eval_samples_per_second": 37.785, "eval_steps_per_second": 9.446, "eval_tts_loss": 5.998060327010354, "step": 132000 }, { "epoch": 1.16612797824052, "grad_norm": 6.156948089599609, "learning_rate": 8.732038556972324e-05, "loss": 2.9282, "step": 132050 }, { "epoch": 1.1665695261308042, "grad_norm": 3.0831196308135986, "learning_rate": 8.731113637481229e-05, "loss": 3.2877, "step": 132100 }, { "epoch": 1.1670110740210884, "grad_norm": 2.766617774963379, "learning_rate": 8.730188429793244e-05, "loss": 3.6187, "step": 132150 }, { "epoch": 1.1674526219113726, "grad_norm": 2.436147451400757, "learning_rate": 8.729262933979835e-05, "loss": 3.2395, "step": 132200 }, { "epoch": 1.1678941698016567, "grad_norm": 7.238566875457764, "learning_rate": 8.728337150112486e-05, "loss": 3.2521, "step": 132250 }, { "epoch": 1.168335717691941, "grad_norm": 1.9058992862701416, "learning_rate": 8.727411078262711e-05, "loss": 3.2199, "step": 132300 }, { "epoch": 1.168777265582225, "grad_norm": 1.8208065032958984, "learning_rate": 8.726484718502035e-05, "loss": 3.5917, "step": 132350 }, { "epoch": 1.1692188134725092, "grad_norm": 
2.439476490020752, "learning_rate": 8.725558070902014e-05, "loss": 3.2484, "step": 132400 }, { "epoch": 1.1696603613627934, "grad_norm": 2.0772078037261963, "learning_rate": 8.724631135534225e-05, "loss": 3.0087, "step": 132450 }, { "epoch": 1.1701019092530776, "grad_norm": 1.8260692358016968, "learning_rate": 8.723703912470264e-05, "loss": 3.1512, "step": 132500 }, { "epoch": 1.1705434571433617, "grad_norm": 2.0503768920898438, "learning_rate": 8.722776401781751e-05, "loss": 3.2767, "step": 132550 }, { "epoch": 1.1709850050336459, "grad_norm": 1.1883853673934937, "learning_rate": 8.721848603540331e-05, "loss": 3.66, "step": 132600 }, { "epoch": 1.17142655292393, "grad_norm": 3.468325138092041, "learning_rate": 8.720920517817665e-05, "loss": 2.6868, "step": 132650 }, { "epoch": 1.1718681008142142, "grad_norm": 2.5057759284973145, "learning_rate": 8.719992144685442e-05, "loss": 3.3156, "step": 132700 }, { "epoch": 1.1723096487044984, "grad_norm": 2.8704116344451904, "learning_rate": 8.719063484215372e-05, "loss": 3.309, "step": 132750 }, { "epoch": 1.1727511965947826, "grad_norm": 2.1519346237182617, "learning_rate": 8.718134536479184e-05, "loss": 3.0556, "step": 132800 }, { "epoch": 1.173192744485067, "grad_norm": 1.5782400369644165, "learning_rate": 8.717205301548631e-05, "loss": 3.1093, "step": 132850 }, { "epoch": 1.173634292375351, "grad_norm": 3.0821375846862793, "learning_rate": 8.71627577949549e-05, "loss": 3.0993, "step": 132900 }, { "epoch": 1.1740758402656353, "grad_norm": 1.1189353466033936, "learning_rate": 8.715345970391557e-05, "loss": 2.7387, "step": 132950 }, { "epoch": 1.1745173881559194, "grad_norm": 2.6682019233703613, "learning_rate": 8.714415874308655e-05, "loss": 2.7153, "step": 133000 }, { "epoch": 1.1749589360462036, "grad_norm": 1.7515902519226074, "learning_rate": 8.713485491318622e-05, "loss": 3.4875, "step": 133050 }, { "epoch": 1.1754004839364878, "grad_norm": 1.7068076133728027, "learning_rate": 8.712554821493326e-05, "loss": 3.2121, 
"step": 133100 }, { "epoch": 1.175842031826772, "grad_norm": 3.6428372859954834, "learning_rate": 8.71162386490465e-05, "loss": 3.0579, "step": 133150 }, { "epoch": 1.176283579717056, "grad_norm": 0.455516517162323, "learning_rate": 8.710692621624506e-05, "loss": 2.8844, "step": 133200 }, { "epoch": 1.1767251276073403, "grad_norm": 1.2548800706863403, "learning_rate": 8.709761091724821e-05, "loss": 2.8791, "step": 133250 }, { "epoch": 1.1771666754976244, "grad_norm": 1.0584521293640137, "learning_rate": 8.70882927527755e-05, "loss": 3.4511, "step": 133300 }, { "epoch": 1.1776082233879086, "grad_norm": 0.7974949479103088, "learning_rate": 8.707897172354666e-05, "loss": 3.153, "step": 133350 }, { "epoch": 1.1780497712781928, "grad_norm": 2.0270535945892334, "learning_rate": 8.70696478302817e-05, "loss": 3.2497, "step": 133400 }, { "epoch": 1.178491319168477, "grad_norm": 1.8549659252166748, "learning_rate": 8.706032107370079e-05, "loss": 3.1353, "step": 133450 }, { "epoch": 1.178932867058761, "grad_norm": 3.472029685974121, "learning_rate": 8.705099145452432e-05, "loss": 3.4737, "step": 133500 }, { "epoch": 1.1793744149490455, "grad_norm": 2.2693088054656982, "learning_rate": 8.704165897347294e-05, "loss": 2.7868, "step": 133550 }, { "epoch": 1.1798159628393297, "grad_norm": 1.4861036539077759, "learning_rate": 8.703232363126753e-05, "loss": 3.4654, "step": 133600 }, { "epoch": 1.1802575107296138, "grad_norm": 1.4503135681152344, "learning_rate": 8.702298542862913e-05, "loss": 3.1721, "step": 133650 }, { "epoch": 1.180699058619898, "grad_norm": 1.2555351257324219, "learning_rate": 8.701364436627906e-05, "loss": 2.4155, "step": 133700 }, { "epoch": 1.1811406065101822, "grad_norm": 2.5926589965820312, "learning_rate": 8.700430044493881e-05, "loss": 3.0593, "step": 133750 }, { "epoch": 1.1815821544004663, "grad_norm": 1.0408382415771484, "learning_rate": 8.699495366533015e-05, "loss": 2.9217, "step": 133800 }, { "epoch": 1.1820237022907505, "grad_norm": 
3.3952131271362305, "learning_rate": 8.698560402817503e-05, "loss": 3.1385, "step": 133850 }, { "epoch": 1.1824652501810347, "grad_norm": 2.9611570835113525, "learning_rate": 8.697625153419563e-05, "loss": 3.0722, "step": 133900 }, { "epoch": 1.1829067980713188, "grad_norm": 3.736604690551758, "learning_rate": 8.696689618411434e-05, "loss": 3.0413, "step": 133950 }, { "epoch": 1.183348345961603, "grad_norm": 3.9566078186035156, "learning_rate": 8.69575379786538e-05, "loss": 3.11, "step": 134000 }, { "epoch": 1.1837898938518872, "grad_norm": 3.8381118774414062, "learning_rate": 8.694817691853682e-05, "loss": 2.9578, "step": 134050 }, { "epoch": 1.1842314417421713, "grad_norm": 1.4392008781433105, "learning_rate": 8.693881300448651e-05, "loss": 3.1636, "step": 134100 }, { "epoch": 1.1846729896324555, "grad_norm": 2.4134435653686523, "learning_rate": 8.692944623722612e-05, "loss": 3.2271, "step": 134150 }, { "epoch": 1.1851145375227397, "grad_norm": 3.1550133228302, "learning_rate": 8.692007661747917e-05, "loss": 3.7818, "step": 134200 }, { "epoch": 1.1855560854130238, "grad_norm": 1.9582149982452393, "learning_rate": 8.691070414596936e-05, "loss": 3.145, "step": 134250 }, { "epoch": 1.185997633303308, "grad_norm": 1.6500295400619507, "learning_rate": 8.690132882342064e-05, "loss": 3.1886, "step": 134300 }, { "epoch": 1.1864391811935922, "grad_norm": 3.904228687286377, "learning_rate": 8.68919506505572e-05, "loss": 3.3516, "step": 134350 }, { "epoch": 1.1868807290838763, "grad_norm": 3.5070960521698, "learning_rate": 8.688256962810339e-05, "loss": 3.1526, "step": 134400 }, { "epoch": 1.1873222769741607, "grad_norm": 3.185276508331299, "learning_rate": 8.687318575678385e-05, "loss": 3.4449, "step": 134450 }, { "epoch": 1.1877638248644449, "grad_norm": 3.7565488815307617, "learning_rate": 8.686379903732338e-05, "loss": 3.4953, "step": 134500 }, { "epoch": 1.188205372754729, "grad_norm": 0.8475595116615295, "learning_rate": 8.685440947044703e-05, "loss": 3.0571, "step": 
134550 }, { "epoch": 1.1886469206450132, "grad_norm": 4.451568603515625, "learning_rate": 8.684501705688006e-05, "loss": 3.0146, "step": 134600 }, { "epoch": 1.1890884685352974, "grad_norm": 1.2072436809539795, "learning_rate": 8.683562179734796e-05, "loss": 2.8346, "step": 134650 }, { "epoch": 1.1895300164255815, "grad_norm": 1.3233250379562378, "learning_rate": 8.682622369257644e-05, "loss": 2.8381, "step": 134700 }, { "epoch": 1.1899715643158657, "grad_norm": 1.7606003284454346, "learning_rate": 8.681682274329141e-05, "loss": 3.3425, "step": 134750 }, { "epoch": 1.1904131122061499, "grad_norm": 1.8309063911437988, "learning_rate": 8.680741895021902e-05, "loss": 3.1333, "step": 134800 }, { "epoch": 1.190854660096434, "grad_norm": 5.641381740570068, "learning_rate": 8.679801231408564e-05, "loss": 3.0138, "step": 134850 }, { "epoch": 1.1912962079867182, "grad_norm": 5.100592136383057, "learning_rate": 8.678860283561783e-05, "loss": 3.3786, "step": 134900 }, { "epoch": 1.1917377558770024, "grad_norm": 1.4024208784103394, "learning_rate": 8.677919051554245e-05, "loss": 3.2306, "step": 134950 }, { "epoch": 1.1921793037672865, "grad_norm": 3.9913697242736816, "learning_rate": 8.676977535458644e-05, "loss": 2.5345, "step": 135000 }, { "epoch": 1.1921793037672865, "eval_asr_loss": 0.9235845024814252, "eval_loss": 2.937715768814087, "eval_runtime": 20.5852, "eval_samples_per_second": 37.308, "eval_steps_per_second": 9.327, "eval_tts_loss": 5.992201569014508, "step": 135000 }, { "epoch": 1.1926208516575707, "grad_norm": 3.502964735031128, "learning_rate": 8.67603573534771e-05, "loss": 3.4259, "step": 135050 }, { "epoch": 1.1930623995478549, "grad_norm": 2.7115063667297363, "learning_rate": 8.675093651294186e-05, "loss": 3.2835, "step": 135100 }, { "epoch": 1.1935039474381393, "grad_norm": 2.064483642578125, "learning_rate": 8.674151283370842e-05, "loss": 3.294, "step": 135150 }, { "epoch": 1.1939454953284234, "grad_norm": 1.643545389175415, "learning_rate": 
8.673208631650467e-05, "loss": 3.5124, "step": 135200 }, { "epoch": 1.1943870432187076, "grad_norm": 3.38920521736145, "learning_rate": 8.672265696205874e-05, "loss": 3.3957, "step": 135250 }, { "epoch": 1.1948285911089918, "grad_norm": 2.7786691188812256, "learning_rate": 8.671322477109896e-05, "loss": 2.9475, "step": 135300 }, { "epoch": 1.195270138999276, "grad_norm": 1.6041375398635864, "learning_rate": 8.670378974435388e-05, "loss": 3.398, "step": 135350 }, { "epoch": 1.19571168688956, "grad_norm": 1.7173302173614502, "learning_rate": 8.669435188255228e-05, "loss": 3.0656, "step": 135400 }, { "epoch": 1.1961532347798443, "grad_norm": 15.721769332885742, "learning_rate": 8.668491118642316e-05, "loss": 3.0591, "step": 135450 }, { "epoch": 1.1965947826701284, "grad_norm": 2.442812204360962, "learning_rate": 8.667546765669572e-05, "loss": 3.1348, "step": 135500 }, { "epoch": 1.1970363305604126, "grad_norm": 1.5753581523895264, "learning_rate": 8.666621024910873e-05, "loss": 3.1149, "step": 135550 }, { "epoch": 1.1974778784506968, "grad_norm": 2.9639949798583984, "learning_rate": 8.665676111100882e-05, "loss": 3.1263, "step": 135600 }, { "epoch": 1.197919426340981, "grad_norm": 2.138810634613037, "learning_rate": 8.664730914148498e-05, "loss": 3.5442, "step": 135650 }, { "epoch": 1.198360974231265, "grad_norm": 2.3526198863983154, "learning_rate": 8.663785434126725e-05, "loss": 3.5807, "step": 135700 }, { "epoch": 1.1988025221215493, "grad_norm": 1.0211713314056396, "learning_rate": 8.662839671108594e-05, "loss": 3.157, "step": 135750 }, { "epoch": 1.1992440700118334, "grad_norm": 3.1189095973968506, "learning_rate": 8.661893625167158e-05, "loss": 3.0157, "step": 135800 }, { "epoch": 1.1996856179021176, "grad_norm": 1.5837284326553345, "learning_rate": 8.660947296375495e-05, "loss": 2.9758, "step": 135850 }, { "epoch": 1.2001271657924018, "grad_norm": 2.6841766834259033, "learning_rate": 8.660000684806694e-05, "loss": 3.0694, "step": 135900 }, { "epoch": 
1.200568713682686, "grad_norm": 3.424384832382202, "learning_rate": 8.659053790533877e-05, "loss": 2.7653, "step": 135950 }, { "epoch": 1.20101026157297, "grad_norm": 2.5790984630584717, "learning_rate": 8.658106613630184e-05, "loss": 3.3548, "step": 136000 }, { "epoch": 1.2014518094632545, "grad_norm": 0.8066131472587585, "learning_rate": 8.657159154168774e-05, "loss": 2.7384, "step": 136050 }, { "epoch": 1.2018933573535386, "grad_norm": 1.3459528684616089, "learning_rate": 8.65621141222283e-05, "loss": 3.3098, "step": 136100 }, { "epoch": 1.2023349052438228, "grad_norm": 1.5684973001480103, "learning_rate": 8.65526338786556e-05, "loss": 3.2824, "step": 136150 }, { "epoch": 1.202776453134107, "grad_norm": 1.0687460899353027, "learning_rate": 8.654315081170187e-05, "loss": 2.8486, "step": 136200 }, { "epoch": 1.2032180010243911, "grad_norm": 1.0528970956802368, "learning_rate": 8.653366492209962e-05, "loss": 3.1545, "step": 136250 }, { "epoch": 1.2036595489146753, "grad_norm": 4.352191925048828, "learning_rate": 8.652417621058158e-05, "loss": 3.1152, "step": 136300 }, { "epoch": 1.2041010968049595, "grad_norm": 1.8942192792892456, "learning_rate": 8.651468467788061e-05, "loss": 3.6867, "step": 136350 }, { "epoch": 1.2045426446952436, "grad_norm": 2.341681480407715, "learning_rate": 8.65051903247299e-05, "loss": 3.1624, "step": 136400 }, { "epoch": 1.2049841925855278, "grad_norm": 1.9934921264648438, "learning_rate": 8.649569315186279e-05, "loss": 3.2298, "step": 136450 }, { "epoch": 1.205425740475812, "grad_norm": 1.4484180212020874, "learning_rate": 8.648619316001286e-05, "loss": 3.3831, "step": 136500 }, { "epoch": 1.2058672883660961, "grad_norm": 1.979071855545044, "learning_rate": 8.647669034991389e-05, "loss": 2.9035, "step": 136550 }, { "epoch": 1.2063088362563803, "grad_norm": 2.6589207649230957, "learning_rate": 8.646718472229991e-05, "loss": 3.3321, "step": 136600 }, { "epoch": 1.2067503841466645, "grad_norm": 1.3302929401397705, "learning_rate": 
8.645767627790515e-05, "loss": 3.5172, "step": 136650 }, { "epoch": 1.2071919320369486, "grad_norm": 4.300166606903076, "learning_rate": 8.644816501746405e-05, "loss": 3.1273, "step": 136700 }, { "epoch": 1.207633479927233, "grad_norm": 2.7277801036834717, "learning_rate": 8.643865094171126e-05, "loss": 3.6246, "step": 136750 }, { "epoch": 1.2080750278175172, "grad_norm": 1.1152167320251465, "learning_rate": 8.642913405138168e-05, "loss": 2.9205, "step": 136800 }, { "epoch": 1.2085165757078014, "grad_norm": 3.885781764984131, "learning_rate": 8.641961434721042e-05, "loss": 3.4886, "step": 136850 }, { "epoch": 1.2089581235980855, "grad_norm": 1.0696148872375488, "learning_rate": 8.641009182993276e-05, "loss": 2.7299, "step": 136900 }, { "epoch": 1.2093996714883697, "grad_norm": 2.8801472187042236, "learning_rate": 8.640056650028428e-05, "loss": 3.257, "step": 136950 }, { "epoch": 1.2098412193786539, "grad_norm": 3.712364435195923, "learning_rate": 8.639103835900069e-05, "loss": 3.5346, "step": 137000 }, { "epoch": 1.210282767268938, "grad_norm": 1.6460645198822021, "learning_rate": 8.638150740681796e-05, "loss": 3.5413, "step": 137050 }, { "epoch": 1.2107243151592222, "grad_norm": 1.8404775857925415, "learning_rate": 8.637197364447231e-05, "loss": 3.5827, "step": 137100 }, { "epoch": 1.2111658630495064, "grad_norm": 4.571069240570068, "learning_rate": 8.63624370727001e-05, "loss": 3.0262, "step": 137150 }, { "epoch": 1.2116074109397905, "grad_norm": 3.377689838409424, "learning_rate": 8.6352897692238e-05, "loss": 3.0937, "step": 137200 }, { "epoch": 1.2120489588300747, "grad_norm": 3.101484775543213, "learning_rate": 8.63433555038228e-05, "loss": 3.4086, "step": 137250 }, { "epoch": 1.2124905067203589, "grad_norm": 3.2903013229370117, "learning_rate": 8.633381050819157e-05, "loss": 3.3043, "step": 137300 }, { "epoch": 1.212932054610643, "grad_norm": 9.131916046142578, "learning_rate": 8.632426270608159e-05, "loss": 3.4324, "step": 137350 }, { "epoch": 
1.2133736025009272, "grad_norm": 1.1834709644317627, "learning_rate": 8.631471209823032e-05, "loss": 3.271, "step": 137400 }, { "epoch": 1.2138151503912114, "grad_norm": 1.6877617835998535, "learning_rate": 8.63051586853755e-05, "loss": 2.7578, "step": 137450 }, { "epoch": 1.2142566982814955, "grad_norm": 3.62005352973938, "learning_rate": 8.629560246825501e-05, "loss": 2.5288, "step": 137500 }, { "epoch": 1.2146982461717797, "grad_norm": 2.522944450378418, "learning_rate": 8.628604344760701e-05, "loss": 2.7992, "step": 137550 }, { "epoch": 1.2151397940620638, "grad_norm": 2.285325050354004, "learning_rate": 8.627648162416985e-05, "loss": 3.0256, "step": 137600 }, { "epoch": 1.2155813419523482, "grad_norm": 2.962488889694214, "learning_rate": 8.626691699868211e-05, "loss": 3.6971, "step": 137650 }, { "epoch": 1.2160228898426324, "grad_norm": 9.112953186035156, "learning_rate": 8.625734957188257e-05, "loss": 3.3418, "step": 137700 }, { "epoch": 1.2164644377329166, "grad_norm": 1.3443338871002197, "learning_rate": 8.624777934451022e-05, "loss": 3.3575, "step": 137750 }, { "epoch": 1.2169059856232007, "grad_norm": 6.0494256019592285, "learning_rate": 8.623820631730429e-05, "loss": 3.2255, "step": 137800 }, { "epoch": 1.217347533513485, "grad_norm": 2.178502321243286, "learning_rate": 8.62286304910042e-05, "loss": 3.2216, "step": 137850 }, { "epoch": 1.217789081403769, "grad_norm": 3.197324275970459, "learning_rate": 8.621905186634964e-05, "loss": 3.1341, "step": 137900 }, { "epoch": 1.2182306292940532, "grad_norm": 1.0135949850082397, "learning_rate": 8.620947044408043e-05, "loss": 3.2025, "step": 137950 }, { "epoch": 1.2186721771843374, "grad_norm": 2.018465995788574, "learning_rate": 8.619988622493669e-05, "loss": 3.1432, "step": 138000 }, { "epoch": 1.2186721771843374, "eval_asr_loss": 0.9245979018125478, "eval_loss": 2.9356586933135986, "eval_runtime": 20.4935, "eval_samples_per_second": 37.475, "eval_steps_per_second": 9.369, "eval_tts_loss": 5.940896828933876, 
"step": 138000 }, { "epoch": 1.2191137250746216, "grad_norm": 3.4536030292510986, "learning_rate": 8.61902992096587e-05, "loss": 3.4214, "step": 138050 }, { "epoch": 1.2195552729649057, "grad_norm": 2.176469087600708, "learning_rate": 8.618070939898699e-05, "loss": 3.1659, "step": 138100 }, { "epoch": 1.21999682085519, "grad_norm": 4.467554092407227, "learning_rate": 8.617111679366226e-05, "loss": 2.888, "step": 138150 }, { "epoch": 1.220438368745474, "grad_norm": 1.241233468055725, "learning_rate": 8.616152139442551e-05, "loss": 2.8083, "step": 138200 }, { "epoch": 1.2208799166357582, "grad_norm": 2.9702117443084717, "learning_rate": 8.615192320201784e-05, "loss": 3.3338, "step": 138250 }, { "epoch": 1.2213214645260424, "grad_norm": 1.1560981273651123, "learning_rate": 8.614232221718069e-05, "loss": 3.3368, "step": 138300 }, { "epoch": 1.2217630124163268, "grad_norm": 0.8796913623809814, "learning_rate": 8.613271844065561e-05, "loss": 3.2698, "step": 138350 }, { "epoch": 1.222204560306611, "grad_norm": 3.3393924236297607, "learning_rate": 8.612311187318442e-05, "loss": 3.3047, "step": 138400 }, { "epoch": 1.2226461081968951, "grad_norm": 3.1728880405426025, "learning_rate": 8.611350251550916e-05, "loss": 3.1528, "step": 138450 }, { "epoch": 1.2230876560871793, "grad_norm": 0.9648553133010864, "learning_rate": 8.610389036837207e-05, "loss": 3.1583, "step": 138500 }, { "epoch": 1.2235292039774635, "grad_norm": 2.8733668327331543, "learning_rate": 8.60942754325156e-05, "loss": 3.0704, "step": 138550 }, { "epoch": 1.2239707518677476, "grad_norm": 8.728034973144531, "learning_rate": 8.608465770868244e-05, "loss": 3.431, "step": 138600 }, { "epoch": 1.2244122997580318, "grad_norm": 4.511626243591309, "learning_rate": 8.607503719761542e-05, "loss": 2.9597, "step": 138650 }, { "epoch": 1.224853847648316, "grad_norm": 4.3571014404296875, "learning_rate": 8.606541390005772e-05, "loss": 2.9135, "step": 138700 }, { "epoch": 1.2252953955386001, "grad_norm": 2.458829164505005, 
"learning_rate": 8.60557878167526e-05, "loss": 3.7083, "step": 138750 }, { "epoch": 1.2257369434288843, "grad_norm": 3.1015942096710205, "learning_rate": 8.604615894844364e-05, "loss": 3.2546, "step": 138800 }, { "epoch": 1.2261784913191685, "grad_norm": 4.6130571365356445, "learning_rate": 8.603652729587455e-05, "loss": 3.2822, "step": 138850 }, { "epoch": 1.2266200392094526, "grad_norm": 0.9187746047973633, "learning_rate": 8.60268928597893e-05, "loss": 2.8447, "step": 138900 }, { "epoch": 1.2270615870997368, "grad_norm": 1.320948600769043, "learning_rate": 8.601725564093209e-05, "loss": 3.2058, "step": 138950 }, { "epoch": 1.227503134990021, "grad_norm": 1.928350567817688, "learning_rate": 8.600761564004727e-05, "loss": 3.5719, "step": 139000 }, { "epoch": 1.2279446828803051, "grad_norm": 1.5674891471862793, "learning_rate": 8.599797285787951e-05, "loss": 2.9566, "step": 139050 }, { "epoch": 1.2283862307705893, "grad_norm": 0.9713530540466309, "learning_rate": 8.598832729517359e-05, "loss": 3.3049, "step": 139100 }, { "epoch": 1.2288277786608734, "grad_norm": 2.7060794830322266, "learning_rate": 8.597867895267455e-05, "loss": 3.2953, "step": 139150 }, { "epoch": 1.2292693265511576, "grad_norm": 1.6016948223114014, "learning_rate": 8.596902783112765e-05, "loss": 3.6453, "step": 139200 }, { "epoch": 1.229710874441442, "grad_norm": 3.066992998123169, "learning_rate": 8.595937393127838e-05, "loss": 3.7555, "step": 139250 }, { "epoch": 1.2301524223317262, "grad_norm": 1.1654362678527832, "learning_rate": 8.59497172538724e-05, "loss": 3.4442, "step": 139300 }, { "epoch": 1.2305939702220103, "grad_norm": 1.2640044689178467, "learning_rate": 8.594005779965559e-05, "loss": 3.2677, "step": 139350 }, { "epoch": 1.2310355181122945, "grad_norm": 1.9451007843017578, "learning_rate": 8.593039556937408e-05, "loss": 3.0464, "step": 139400 }, { "epoch": 1.2314770660025787, "grad_norm": 2.010702610015869, "learning_rate": 8.59207305637742e-05, "loss": 3.5588, "step": 139450 }, { 
"epoch": 1.2319186138928628, "grad_norm": 1.7903605699539185, "learning_rate": 8.591106278360247e-05, "loss": 2.9184, "step": 139500 }, { "epoch": 1.232360161783147, "grad_norm": 4.142259120941162, "learning_rate": 8.590139222960568e-05, "loss": 3.2671, "step": 139550 }, { "epoch": 1.2328017096734312, "grad_norm": 6.523422718048096, "learning_rate": 8.589171890253073e-05, "loss": 3.1727, "step": 139600 }, { "epoch": 1.2332432575637153, "grad_norm": 2.88853120803833, "learning_rate": 8.588204280312488e-05, "loss": 3.1479, "step": 139650 }, { "epoch": 1.2336848054539995, "grad_norm": 3.082754135131836, "learning_rate": 8.587236393213549e-05, "loss": 2.7581, "step": 139700 }, { "epoch": 1.2341263533442837, "grad_norm": 2.5806145668029785, "learning_rate": 8.586268229031019e-05, "loss": 3.3388, "step": 139750 }, { "epoch": 1.2345679012345678, "grad_norm": 6.057657718658447, "learning_rate": 8.585299787839678e-05, "loss": 3.3327, "step": 139800 }, { "epoch": 1.235009449124852, "grad_norm": 5.6322021484375, "learning_rate": 8.58433106971433e-05, "loss": 3.6759, "step": 139850 }, { "epoch": 1.2354509970151362, "grad_norm": 0.7276424169540405, "learning_rate": 8.583362074729802e-05, "loss": 3.641, "step": 139900 }, { "epoch": 1.2358925449054206, "grad_norm": 0.8799581527709961, "learning_rate": 8.582392802960939e-05, "loss": 3.5811, "step": 139950 }, { "epoch": 1.2363340927957047, "grad_norm": 3.2745556831359863, "learning_rate": 8.58142325448261e-05, "loss": 3.1727, "step": 140000 }, { "epoch": 1.2367756406859889, "grad_norm": 1.2958625555038452, "learning_rate": 8.580472828582498e-05, "loss": 3.2812, "step": 140050 }, { "epoch": 1.237217188576273, "grad_norm": 0.9326485395431519, "learning_rate": 8.579502732440385e-05, "loss": 3.3295, "step": 140100 }, { "epoch": 1.2376587364665572, "grad_norm": 1.5448970794677734, "learning_rate": 8.578532359812039e-05, "loss": 2.8871, "step": 140150 }, { "epoch": 1.2381002843568414, "grad_norm": 2.073150873184204, "learning_rate": 
8.577561710772413e-05, "loss": 3.5859, "step": 140200 }, { "epoch": 1.2385418322471256, "grad_norm": 1.6688034534454346, "learning_rate": 8.576590785396483e-05, "loss": 3.0485, "step": 140250 }, { "epoch": 1.2389833801374097, "grad_norm": 2.4727234840393066, "learning_rate": 8.575619583759242e-05, "loss": 3.1929, "step": 140300 }, { "epoch": 1.2394249280276939, "grad_norm": 3.2702481746673584, "learning_rate": 8.574648105935709e-05, "loss": 3.1036, "step": 140350 }, { "epoch": 1.239866475917978, "grad_norm": 2.7264039516448975, "learning_rate": 8.573676352000923e-05, "loss": 3.4255, "step": 140400 }, { "epoch": 1.2403080238082622, "grad_norm": 3.251349925994873, "learning_rate": 8.572704322029942e-05, "loss": 3.3815, "step": 140450 }, { "epoch": 1.2407495716985464, "grad_norm": 3.0462658405303955, "learning_rate": 8.571732016097849e-05, "loss": 3.0341, "step": 140500 }, { "epoch": 1.2411911195888305, "grad_norm": 1.1782664060592651, "learning_rate": 8.570759434279744e-05, "loss": 3.2994, "step": 140550 }, { "epoch": 1.2416326674791147, "grad_norm": 1.4393571615219116, "learning_rate": 8.56978657665075e-05, "loss": 3.3121, "step": 140600 }, { "epoch": 1.2420742153693989, "grad_norm": 1.4110065698623657, "learning_rate": 8.568813443286017e-05, "loss": 3.038, "step": 140650 }, { "epoch": 1.242515763259683, "grad_norm": 1.5697439908981323, "learning_rate": 8.567840034260706e-05, "loss": 2.8957, "step": 140700 }, { "epoch": 1.2429573111499672, "grad_norm": 2.0723979473114014, "learning_rate": 8.566866349650009e-05, "loss": 3.1198, "step": 140750 }, { "epoch": 1.2433988590402516, "grad_norm": 3.4420316219329834, "learning_rate": 8.565892389529131e-05, "loss": 3.6046, "step": 140800 }, { "epoch": 1.2438404069305358, "grad_norm": 0.7216135263442993, "learning_rate": 8.564918153973305e-05, "loss": 3.0498, "step": 140850 }, { "epoch": 1.24428195482082, "grad_norm": 2.0984039306640625, "learning_rate": 8.563943643057779e-05, "loss": 3.2841, "step": 140900 }, { "epoch": 
1.244723502711104, "grad_norm": 2.13993501663208, "learning_rate": 8.56296885685783e-05, "loss": 3.3196, "step": 140950 }, { "epoch": 1.2451650506013883, "grad_norm": 3.896669387817383, "learning_rate": 8.561993795448748e-05, "loss": 2.9746, "step": 141000 }, { "epoch": 1.2451650506013883, "eval_asr_loss": 0.9286146542702917, "eval_loss": 2.938673734664917, "eval_runtime": 20.5886, "eval_samples_per_second": 37.302, "eval_steps_per_second": 9.326, "eval_tts_loss": 5.989402265710523, "step": 141000 }, { "epoch": 1.2456065984916724, "grad_norm": 2.933366298675537, "learning_rate": 8.561018458905852e-05, "loss": 2.8598, "step": 141050 }, { "epoch": 1.2460481463819566, "grad_norm": 1.264729380607605, "learning_rate": 8.560042847304476e-05, "loss": 3.3088, "step": 141100 }, { "epoch": 1.2464896942722408, "grad_norm": 3.7516424655914307, "learning_rate": 8.559066960719978e-05, "loss": 3.1826, "step": 141150 }, { "epoch": 1.246931242162525, "grad_norm": 1.529587745666504, "learning_rate": 8.558110325151189e-05, "loss": 3.0593, "step": 141200 }, { "epoch": 1.247372790052809, "grad_norm": 2.4243791103363037, "learning_rate": 8.557133894322516e-05, "loss": 3.3451, "step": 141250 }, { "epoch": 1.2478143379430933, "grad_norm": 1.9787139892578125, "learning_rate": 8.55615718873541e-05, "loss": 2.6297, "step": 141300 }, { "epoch": 1.2482558858333774, "grad_norm": 1.8320086002349854, "learning_rate": 8.555180208465318e-05, "loss": 3.2509, "step": 141350 }, { "epoch": 1.2486974337236616, "grad_norm": 5.408695220947266, "learning_rate": 8.554202953587701e-05, "loss": 3.4093, "step": 141400 }, { "epoch": 1.2491389816139458, "grad_norm": 3.7261509895324707, "learning_rate": 8.553225424178045e-05, "loss": 3.1478, "step": 141450 }, { "epoch": 1.2495805295042302, "grad_norm": 1.5579651594161987, "learning_rate": 8.552247620311855e-05, "loss": 3.2053, "step": 141500 }, { "epoch": 1.2500220773945143, "grad_norm": 2.1170806884765625, "learning_rate": 8.55126954206466e-05, "loss": 3.3457, 
"step": 141550 }, { "epoch": 1.2504636252847985, "grad_norm": 4.129932403564453, "learning_rate": 8.550291189512005e-05, "loss": 2.4842, "step": 141600 }, { "epoch": 1.2509051731750827, "grad_norm": 0.851900041103363, "learning_rate": 8.549312562729463e-05, "loss": 3.0324, "step": 141650 }, { "epoch": 1.2513467210653668, "grad_norm": 2.4202487468719482, "learning_rate": 8.548333661792621e-05, "loss": 3.6167, "step": 141700 }, { "epoch": 1.251788268955651, "grad_norm": 3.154711961746216, "learning_rate": 8.547354486777094e-05, "loss": 2.9288, "step": 141750 }, { "epoch": 1.2522298168459352, "grad_norm": 2.6830191612243652, "learning_rate": 8.546375037758512e-05, "loss": 3.7127, "step": 141800 }, { "epoch": 1.2526713647362193, "grad_norm": 1.3863435983657837, "learning_rate": 8.545395314812532e-05, "loss": 3.1614, "step": 141850 }, { "epoch": 1.2531129126265035, "grad_norm": 4.197157382965088, "learning_rate": 8.544415318014828e-05, "loss": 3.1192, "step": 141900 }, { "epoch": 1.2535544605167876, "grad_norm": 2.338468074798584, "learning_rate": 8.543435047441096e-05, "loss": 3.8493, "step": 141950 }, { "epoch": 1.2539960084070718, "grad_norm": 3.277184247970581, "learning_rate": 8.542454503167054e-05, "loss": 3.4279, "step": 142000 }, { "epoch": 1.254437556297356, "grad_norm": 1.9274518489837646, "learning_rate": 8.541473685268442e-05, "loss": 3.132, "step": 142050 }, { "epoch": 1.2548791041876401, "grad_norm": 2.079745054244995, "learning_rate": 8.540492593821016e-05, "loss": 3.3633, "step": 142100 }, { "epoch": 1.2553206520779243, "grad_norm": 1.85688316822052, "learning_rate": 8.539511228900563e-05, "loss": 2.9476, "step": 142150 }, { "epoch": 1.2557621999682085, "grad_norm": 1.192519187927246, "learning_rate": 8.53852959058288e-05, "loss": 3.0553, "step": 142200 }, { "epoch": 1.2562037478584926, "grad_norm": 1.694144606590271, "learning_rate": 8.537547678943792e-05, "loss": 3.1106, "step": 142250 }, { "epoch": 1.2566452957487768, "grad_norm": 1.598209023475647, 
"learning_rate": 8.536565494059143e-05, "loss": 3.225, "step": 142300 }, { "epoch": 1.257086843639061, "grad_norm": 1.6346831321716309, "learning_rate": 8.5355830360048e-05, "loss": 3.1595, "step": 142350 }, { "epoch": 1.2575283915293451, "grad_norm": 1.3039432764053345, "learning_rate": 8.534600304856646e-05, "loss": 3.2784, "step": 142400 }, { "epoch": 1.2579699394196295, "grad_norm": 3.690321207046509, "learning_rate": 8.533617300690593e-05, "loss": 2.9288, "step": 142450 }, { "epoch": 1.2584114873099137, "grad_norm": 2.3511857986450195, "learning_rate": 8.532634023582567e-05, "loss": 3.2557, "step": 142500 }, { "epoch": 1.2588530352001979, "grad_norm": 2.7391929626464844, "learning_rate": 8.531650473608518e-05, "loss": 3.4044, "step": 142550 }, { "epoch": 1.259294583090482, "grad_norm": 4.795861721038818, "learning_rate": 8.530666650844419e-05, "loss": 2.7433, "step": 142600 }, { "epoch": 1.2597361309807662, "grad_norm": 2.0584652423858643, "learning_rate": 8.529682555366259e-05, "loss": 3.2387, "step": 142650 }, { "epoch": 1.2601776788710504, "grad_norm": 3.6735129356384277, "learning_rate": 8.528698187250052e-05, "loss": 3.4929, "step": 142700 }, { "epoch": 1.2606192267613345, "grad_norm": 2.488990545272827, "learning_rate": 8.527713546571834e-05, "loss": 3.4303, "step": 142750 }, { "epoch": 1.2610607746516187, "grad_norm": 3.3868706226348877, "learning_rate": 8.526728633407659e-05, "loss": 3.2953, "step": 142800 }, { "epoch": 1.2615023225419029, "grad_norm": 2.3298208713531494, "learning_rate": 8.525743447833601e-05, "loss": 3.8784, "step": 142850 }, { "epoch": 1.261943870432187, "grad_norm": 7.178310394287109, "learning_rate": 8.52475798992576e-05, "loss": 2.9547, "step": 142900 }, { "epoch": 1.2623854183224712, "grad_norm": 1.7926055192947388, "learning_rate": 8.523772259760255e-05, "loss": 3.4618, "step": 142950 }, { "epoch": 1.2628269662127554, "grad_norm": 2.2966785430908203, "learning_rate": 8.522786257413221e-05, "loss": 2.9838, "step": 143000 }, { 
"epoch": 1.2632685141030398, "grad_norm": 4.8272600173950195, "learning_rate": 8.52179998296082e-05, "loss": 3.4569, "step": 143050 }, { "epoch": 1.263710061993324, "grad_norm": 1.6934598684310913, "learning_rate": 8.520813436479238e-05, "loss": 3.3623, "step": 143100 }, { "epoch": 1.264151609883608, "grad_norm": 2.3045802116394043, "learning_rate": 8.519826618044672e-05, "loss": 2.8705, "step": 143150 }, { "epoch": 1.2645931577738923, "grad_norm": 1.5127077102661133, "learning_rate": 8.518839527733346e-05, "loss": 3.7103, "step": 143200 }, { "epoch": 1.2650347056641764, "grad_norm": 1.3205996751785278, "learning_rate": 8.517852165621507e-05, "loss": 3.3271, "step": 143250 }, { "epoch": 1.2654762535544606, "grad_norm": 4.060527801513672, "learning_rate": 8.516864531785417e-05, "loss": 2.9748, "step": 143300 }, { "epoch": 1.2659178014447448, "grad_norm": 2.6280086040496826, "learning_rate": 8.515876626301367e-05, "loss": 3.334, "step": 143350 }, { "epoch": 1.266359349335029, "grad_norm": 1.7252777814865112, "learning_rate": 8.514888449245659e-05, "loss": 2.9757, "step": 143400 }, { "epoch": 1.266800897225313, "grad_norm": 2.3652231693267822, "learning_rate": 8.513900000694624e-05, "loss": 3.2599, "step": 143450 }, { "epoch": 1.2672424451155972, "grad_norm": 3.4115631580352783, "learning_rate": 8.512911280724612e-05, "loss": 3.4985, "step": 143500 }, { "epoch": 1.2676839930058814, "grad_norm": 1.9971885681152344, "learning_rate": 8.51192228941199e-05, "loss": 3.3195, "step": 143550 }, { "epoch": 1.2681255408961656, "grad_norm": 1.7323343753814697, "learning_rate": 8.510933026833154e-05, "loss": 2.918, "step": 143600 }, { "epoch": 1.2685670887864497, "grad_norm": 1.7640634775161743, "learning_rate": 8.509943493064512e-05, "loss": 2.8826, "step": 143650 }, { "epoch": 1.269008636676734, "grad_norm": 1.5099214315414429, "learning_rate": 8.508973486936555e-05, "loss": 3.094, "step": 143700 }, { "epoch": 1.269450184567018, "grad_norm": 1.7164794206619263, "learning_rate": 
8.507983416437615e-05, "loss": 3.541, "step": 143750 }, { "epoch": 1.2698917324573022, "grad_norm": 1.553723692893982, "learning_rate": 8.506993074976701e-05, "loss": 3.3889, "step": 143800 }, { "epoch": 1.2703332803475864, "grad_norm": 1.3120352029800415, "learning_rate": 8.506002462630309e-05, "loss": 2.9932, "step": 143850 }, { "epoch": 1.2707748282378706, "grad_norm": 4.442413806915283, "learning_rate": 8.50501157947496e-05, "loss": 3.1092, "step": 143900 }, { "epoch": 1.2712163761281547, "grad_norm": 1.6406073570251465, "learning_rate": 8.504020425587187e-05, "loss": 3.1981, "step": 143950 }, { "epoch": 1.271657924018439, "grad_norm": 2.6018967628479004, "learning_rate": 8.503029001043548e-05, "loss": 3.1659, "step": 144000 }, { "epoch": 1.271657924018439, "eval_asr_loss": 0.9322961166804568, "eval_loss": 2.9280214309692383, "eval_runtime": 20.2032, "eval_samples_per_second": 38.014, "eval_steps_per_second": 9.503, "eval_tts_loss": 5.993786748371479, "step": 144000 }, { "epoch": 1.2720994719087233, "grad_norm": 1.6098721027374268, "learning_rate": 8.502037305920624e-05, "loss": 3.3989, "step": 144050 }, { "epoch": 1.2725410197990075, "grad_norm": 1.1011525392532349, "learning_rate": 8.501045340295015e-05, "loss": 2.9338, "step": 144100 }, { "epoch": 1.2729825676892916, "grad_norm": 1.1838862895965576, "learning_rate": 8.500053104243342e-05, "loss": 3.4187, "step": 144150 }, { "epoch": 1.2734241155795758, "grad_norm": 3.3150293827056885, "learning_rate": 8.499060597842247e-05, "loss": 3.6549, "step": 144200 }, { "epoch": 1.27386566346986, "grad_norm": 6.420579433441162, "learning_rate": 8.498067821168389e-05, "loss": 3.0136, "step": 144250 }, { "epoch": 1.2743072113601441, "grad_norm": 1.6961369514465332, "learning_rate": 8.497074774298456e-05, "loss": 2.9492, "step": 144300 }, { "epoch": 1.2747487592504283, "grad_norm": 1.9354748725891113, "learning_rate": 8.496081457309153e-05, "loss": 3.2681, "step": 144350 }, { "epoch": 1.2751903071407125, "grad_norm": 
1.1011364459991455, "learning_rate": 8.495087870277201e-05, "loss": 3.1899, "step": 144400 }, { "epoch": 1.2756318550309966, "grad_norm": 2.966367721557617, "learning_rate": 8.49409401327935e-05, "loss": 2.9833, "step": 144450 }, { "epoch": 1.2760734029212808, "grad_norm": 5.4192938804626465, "learning_rate": 8.493099886392364e-05, "loss": 3.3008, "step": 144500 }, { "epoch": 1.276514950811565, "grad_norm": 1.545189619064331, "learning_rate": 8.492105489693034e-05, "loss": 3.3218, "step": 144550 }, { "epoch": 1.2769564987018491, "grad_norm": 1.899052619934082, "learning_rate": 8.491110823258166e-05, "loss": 3.1562, "step": 144600 }, { "epoch": 1.2773980465921335, "grad_norm": 2.4317712783813477, "learning_rate": 8.49011588716459e-05, "loss": 3.5698, "step": 144650 }, { "epoch": 1.2778395944824177, "grad_norm": 0.8498044610023499, "learning_rate": 8.489120681489157e-05, "loss": 3.1922, "step": 144700 }, { "epoch": 1.2782811423727019, "grad_norm": 3.520315170288086, "learning_rate": 8.48812520630874e-05, "loss": 2.8446, "step": 144750 }, { "epoch": 1.278722690262986, "grad_norm": 2.8437373638153076, "learning_rate": 8.48712946170023e-05, "loss": 2.854, "step": 144800 }, { "epoch": 1.2791642381532702, "grad_norm": 2.23712420463562, "learning_rate": 8.486133447740537e-05, "loss": 3.4061, "step": 144850 }, { "epoch": 1.2796057860435543, "grad_norm": 3.989715337753296, "learning_rate": 8.485137164506596e-05, "loss": 3.206, "step": 144900 }, { "epoch": 1.2800473339338385, "grad_norm": 0.8477696776390076, "learning_rate": 8.484140612075364e-05, "loss": 3.3947, "step": 144950 }, { "epoch": 1.2804888818241227, "grad_norm": 1.0561884641647339, "learning_rate": 8.483143790523813e-05, "loss": 3.4777, "step": 145000 }, { "epoch": 1.2809304297144068, "grad_norm": 3.1107983589172363, "learning_rate": 8.482146699928941e-05, "loss": 3.2744, "step": 145050 }, { "epoch": 1.281371977604691, "grad_norm": 2.971332311630249, "learning_rate": 8.481149340367764e-05, "loss": 3.6498, "step": 
145100 }, { "epoch": 1.2818135254949752, "grad_norm": 2.0575344562530518, "learning_rate": 8.480151711917321e-05, "loss": 2.7598, "step": 145150 }, { "epoch": 1.2822550733852593, "grad_norm": 1.7395824193954468, "learning_rate": 8.479153814654667e-05, "loss": 3.0415, "step": 145200 }, { "epoch": 1.2826966212755435, "grad_norm": 1.918105125427246, "learning_rate": 8.478155648656885e-05, "loss": 3.4108, "step": 145250 }, { "epoch": 1.2831381691658277, "grad_norm": 1.5583088397979736, "learning_rate": 8.477157214001072e-05, "loss": 3.1427, "step": 145300 }, { "epoch": 1.2835797170561118, "grad_norm": 2.076531410217285, "learning_rate": 8.47615851076435e-05, "loss": 3.1514, "step": 145350 }, { "epoch": 1.284021264946396, "grad_norm": 1.9906885623931885, "learning_rate": 8.47515953902386e-05, "loss": 3.2949, "step": 145400 }, { "epoch": 1.2844628128366802, "grad_norm": 1.319674015045166, "learning_rate": 8.474160298856764e-05, "loss": 3.5379, "step": 145450 }, { "epoch": 1.2849043607269643, "grad_norm": 2.557248830795288, "learning_rate": 8.473160790340245e-05, "loss": 3.4775, "step": 145500 }, { "epoch": 1.2853459086172485, "grad_norm": 1.7793129682540894, "learning_rate": 8.472161013551506e-05, "loss": 2.6998, "step": 145550 }, { "epoch": 1.2857874565075327, "grad_norm": 2.644193410873413, "learning_rate": 8.471160968567773e-05, "loss": 3.0271, "step": 145600 }, { "epoch": 1.286229004397817, "grad_norm": 2.9333336353302, "learning_rate": 8.470160655466289e-05, "loss": 3.0707, "step": 145650 }, { "epoch": 1.2866705522881012, "grad_norm": 1.7270702123641968, "learning_rate": 8.46916007432432e-05, "loss": 3.2489, "step": 145700 }, { "epoch": 1.2871121001783854, "grad_norm": 2.789318084716797, "learning_rate": 8.468159225219153e-05, "loss": 3.3415, "step": 145750 }, { "epoch": 1.2875536480686696, "grad_norm": 2.3298559188842773, "learning_rate": 8.467158108228094e-05, "loss": 3.4328, "step": 145800 }, { "epoch": 1.2879951959589537, "grad_norm": 2.0260627269744873, 
"learning_rate": 8.466156723428475e-05, "loss": 2.9463, "step": 145850 }, { "epoch": 1.288436743849238, "grad_norm": 1.130997896194458, "learning_rate": 8.465155070897639e-05, "loss": 3.4273, "step": 145900 }, { "epoch": 1.288878291739522, "grad_norm": 5.2059221267700195, "learning_rate": 8.464153150712957e-05, "loss": 3.4943, "step": 145950 }, { "epoch": 1.2893198396298062, "grad_norm": 1.7285943031311035, "learning_rate": 8.46315096295182e-05, "loss": 3.4631, "step": 146000 }, { "epoch": 1.2897613875200904, "grad_norm": 3.8934295177459717, "learning_rate": 8.462148507691638e-05, "loss": 3.4679, "step": 146050 }, { "epoch": 1.2902029354103746, "grad_norm": 4.572065353393555, "learning_rate": 8.461145785009843e-05, "loss": 3.2549, "step": 146100 }, { "epoch": 1.2906444833006587, "grad_norm": 2.979644775390625, "learning_rate": 8.460142794983883e-05, "loss": 3.023, "step": 146150 }, { "epoch": 1.291086031190943, "grad_norm": 2.907404661178589, "learning_rate": 8.459139537691235e-05, "loss": 3.2693, "step": 146200 }, { "epoch": 1.2915275790812273, "grad_norm": 3.809751510620117, "learning_rate": 8.458136013209391e-05, "loss": 3.6373, "step": 146250 }, { "epoch": 1.2919691269715115, "grad_norm": 2.584719181060791, "learning_rate": 8.457132221615862e-05, "loss": 3.107, "step": 146300 }, { "epoch": 1.2924106748617956, "grad_norm": 1.6046016216278076, "learning_rate": 8.456128162988186e-05, "loss": 3.3261, "step": 146350 }, { "epoch": 1.2928522227520798, "grad_norm": 1.9032001495361328, "learning_rate": 8.455123837403916e-05, "loss": 3.2709, "step": 146400 }, { "epoch": 1.293293770642364, "grad_norm": 0.8189128041267395, "learning_rate": 8.45411924494063e-05, "loss": 2.8643, "step": 146450 }, { "epoch": 1.2937353185326481, "grad_norm": 2.8768742084503174, "learning_rate": 8.45311438567592e-05, "loss": 3.0366, "step": 146500 }, { "epoch": 1.2941768664229323, "grad_norm": 1.3201286792755127, "learning_rate": 8.452109259687407e-05, "loss": 3.1472, "step": 146550 }, { 
"epoch": 1.2946184143132164, "grad_norm": 2.3641459941864014, "learning_rate": 8.451103867052725e-05, "loss": 3.7229, "step": 146600 }, { "epoch": 1.2950599622035006, "grad_norm": 7.314673900604248, "learning_rate": 8.450098207849534e-05, "loss": 3.1165, "step": 146650 }, { "epoch": 1.2955015100937848, "grad_norm": 2.273407220840454, "learning_rate": 8.449092282155513e-05, "loss": 3.0138, "step": 146700 }, { "epoch": 1.295943057984069, "grad_norm": 2.268495798110962, "learning_rate": 8.448086090048361e-05, "loss": 2.4604, "step": 146750 }, { "epoch": 1.2963846058743531, "grad_norm": 1.9816882610321045, "learning_rate": 8.447079631605797e-05, "loss": 3.2556, "step": 146800 }, { "epoch": 1.2968261537646373, "grad_norm": 1.5324925184249878, "learning_rate": 8.446072906905562e-05, "loss": 3.2377, "step": 146850 }, { "epoch": 1.2972677016549214, "grad_norm": 4.843852996826172, "learning_rate": 8.445065916025416e-05, "loss": 3.302, "step": 146900 }, { "epoch": 1.2977092495452056, "grad_norm": 4.478860855102539, "learning_rate": 8.444058659043143e-05, "loss": 3.0785, "step": 146950 }, { "epoch": 1.2981507974354898, "grad_norm": 4.566507339477539, "learning_rate": 8.443051136036542e-05, "loss": 3.5216, "step": 147000 }, { "epoch": 1.2981507974354898, "eval_asr_loss": 0.9203305681771484, "eval_loss": 2.9263274669647217, "eval_runtime": 20.2234, "eval_samples_per_second": 37.976, "eval_steps_per_second": 9.494, "eval_tts_loss": 5.990303801165284, "step": 147000 }, { "epoch": 1.298592345325774, "grad_norm": 1.1812950372695923, "learning_rate": 8.442043347083438e-05, "loss": 3.3527, "step": 147050 }, { "epoch": 1.299033893216058, "grad_norm": 0.6211222410202026, "learning_rate": 8.441035292261672e-05, "loss": 3.4486, "step": 147100 }, { "epoch": 1.2994754411063423, "grad_norm": 3.0547852516174316, "learning_rate": 8.440047140665608e-05, "loss": 2.8583, "step": 147150 }, { "epoch": 1.2999169889966264, "grad_norm": 1.5730994939804077, "learning_rate": 8.439038559653625e-05, 
"loss": 3.1623, "step": 147200 }, { "epoch": 1.3003585368869108, "grad_norm": 4.245264053344727, "learning_rate": 8.438029713005078e-05, "loss": 3.1082, "step": 147250 }, { "epoch": 1.300800084777195, "grad_norm": 2.3482489585876465, "learning_rate": 8.437020600797888e-05, "loss": 2.945, "step": 147300 }, { "epoch": 1.3012416326674792, "grad_norm": 0.9055774807929993, "learning_rate": 8.436011223110004e-05, "loss": 3.1753, "step": 147350 }, { "epoch": 1.3016831805577633, "grad_norm": 2.761030912399292, "learning_rate": 8.435001580019388e-05, "loss": 3.1069, "step": 147400 }, { "epoch": 1.3021247284480475, "grad_norm": 3.1822400093078613, "learning_rate": 8.433991671604028e-05, "loss": 3.3119, "step": 147450 }, { "epoch": 1.3025662763383317, "grad_norm": 2.4787344932556152, "learning_rate": 8.43298149794193e-05, "loss": 3.4397, "step": 147500 }, { "epoch": 1.3030078242286158, "grad_norm": 3.7208900451660156, "learning_rate": 8.431971059111124e-05, "loss": 3.4477, "step": 147550 }, { "epoch": 1.3034493721189, "grad_norm": 3.309570550918579, "learning_rate": 8.430960355189654e-05, "loss": 3.03, "step": 147600 }, { "epoch": 1.3038909200091842, "grad_norm": 0.6187686920166016, "learning_rate": 8.429949386255591e-05, "loss": 3.2692, "step": 147650 }, { "epoch": 1.3043324678994683, "grad_norm": 4.8789381980896, "learning_rate": 8.428938152387024e-05, "loss": 3.1269, "step": 147700 }, { "epoch": 1.3047740157897525, "grad_norm": 1.8257217407226562, "learning_rate": 8.42792665366206e-05, "loss": 3.6406, "step": 147750 }, { "epoch": 1.3052155636800367, "grad_norm": 0.7555809020996094, "learning_rate": 8.42691489015883e-05, "loss": 3.3547, "step": 147800 }, { "epoch": 1.305657111570321, "grad_norm": 2.5792572498321533, "learning_rate": 8.425902861955485e-05, "loss": 2.7654, "step": 147850 }, { "epoch": 1.3060986594606052, "grad_norm": 2.951904296875, "learning_rate": 8.424890569130195e-05, "loss": 3.4109, "step": 147900 }, { "epoch": 1.3065402073508894, "grad_norm": 
2.7723746299743652, "learning_rate": 8.423878011761149e-05, "loss": 3.6622, "step": 147950 }, { "epoch": 1.3069817552411735, "grad_norm": 3.5301687717437744, "learning_rate": 8.422865189926561e-05, "loss": 3.3498, "step": 148000 }, { "epoch": 1.3074233031314577, "grad_norm": 2.9211134910583496, "learning_rate": 8.421852103704664e-05, "loss": 2.8255, "step": 148050 }, { "epoch": 1.3078648510217419, "grad_norm": 2.1544718742370605, "learning_rate": 8.420838753173705e-05, "loss": 3.3958, "step": 148100 }, { "epoch": 1.308306398912026, "grad_norm": 1.1590626239776611, "learning_rate": 8.419825138411963e-05, "loss": 3.156, "step": 148150 }, { "epoch": 1.3087479468023102, "grad_norm": 2.3119921684265137, "learning_rate": 8.418811259497727e-05, "loss": 3.5458, "step": 148200 }, { "epoch": 1.3091894946925944, "grad_norm": 5.003969192504883, "learning_rate": 8.417797116509312e-05, "loss": 3.0429, "step": 148250 }, { "epoch": 1.3096310425828785, "grad_norm": 3.385624408721924, "learning_rate": 8.416782709525053e-05, "loss": 3.5067, "step": 148300 }, { "epoch": 1.3100725904731627, "grad_norm": 2.701323986053467, "learning_rate": 8.415768038623302e-05, "loss": 3.4576, "step": 148350 }, { "epoch": 1.3105141383634469, "grad_norm": 1.0806344747543335, "learning_rate": 8.414753103882434e-05, "loss": 3.084, "step": 148400 }, { "epoch": 1.310955686253731, "grad_norm": 2.5085790157318115, "learning_rate": 8.413737905380848e-05, "loss": 3.1328, "step": 148450 }, { "epoch": 1.3113972341440152, "grad_norm": 2.03137469291687, "learning_rate": 8.412722443196953e-05, "loss": 3.4205, "step": 148500 }, { "epoch": 1.3118387820342994, "grad_norm": 2.739572286605835, "learning_rate": 8.41170671740919e-05, "loss": 3.5465, "step": 148550 }, { "epoch": 1.3122803299245835, "grad_norm": 1.639987826347351, "learning_rate": 8.410690728096013e-05, "loss": 3.1524, "step": 148600 }, { "epoch": 1.3127218778148677, "grad_norm": 3.545746088027954, "learning_rate": 8.4096744753359e-05, "loss": 2.8028, 
"step": 148650 }, { "epoch": 1.3131634257051519, "grad_norm": 5.186053276062012, "learning_rate": 8.408657959207347e-05, "loss": 3.3191, "step": 148700 }, { "epoch": 1.313604973595436, "grad_norm": 0.6773577928543091, "learning_rate": 8.40764117978887e-05, "loss": 3.2917, "step": 148750 }, { "epoch": 1.3140465214857202, "grad_norm": 1.8338830471038818, "learning_rate": 8.406624137159008e-05, "loss": 3.3637, "step": 148800 }, { "epoch": 1.3144880693760046, "grad_norm": 1.1749883890151978, "learning_rate": 8.405606831396318e-05, "loss": 3.0744, "step": 148850 }, { "epoch": 1.3149296172662888, "grad_norm": 2.124967575073242, "learning_rate": 8.404589262579381e-05, "loss": 3.4487, "step": 148900 }, { "epoch": 1.315371165156573, "grad_norm": 2.8038904666900635, "learning_rate": 8.403571430786793e-05, "loss": 3.1075, "step": 148950 }, { "epoch": 1.315812713046857, "grad_norm": 4.513190746307373, "learning_rate": 8.402553336097172e-05, "loss": 2.9033, "step": 149000 }, { "epoch": 1.3162542609371413, "grad_norm": 3.4421756267547607, "learning_rate": 8.40153497858916e-05, "loss": 3.0916, "step": 149050 }, { "epoch": 1.3166958088274254, "grad_norm": 1.1779747009277344, "learning_rate": 8.400516358341415e-05, "loss": 2.8582, "step": 149100 }, { "epoch": 1.3171373567177096, "grad_norm": 6.414771556854248, "learning_rate": 8.399497475432616e-05, "loss": 3.1022, "step": 149150 }, { "epoch": 1.3175789046079938, "grad_norm": 1.1380168199539185, "learning_rate": 8.398478329941466e-05, "loss": 3.1022, "step": 149200 }, { "epoch": 1.318020452498278, "grad_norm": 2.7382118701934814, "learning_rate": 8.397458921946682e-05, "loss": 3.0475, "step": 149250 }, { "epoch": 1.318462000388562, "grad_norm": 3.204192876815796, "learning_rate": 8.396439251527004e-05, "loss": 3.0297, "step": 149300 }, { "epoch": 1.3189035482788463, "grad_norm": 1.7281981706619263, "learning_rate": 8.395419318761197e-05, "loss": 3.0445, "step": 149350 }, { "epoch": 1.3193450961691304, "grad_norm": 
2.468430280685425, "learning_rate": 8.39439912372804e-05, "loss": 2.9012, "step": 149400 }, { "epoch": 1.3197866440594148, "grad_norm": 3.0778887271881104, "learning_rate": 8.393378666506333e-05, "loss": 3.4102, "step": 149450 }, { "epoch": 1.320228191949699, "grad_norm": 1.3242723941802979, "learning_rate": 8.3923579471749e-05, "loss": 2.794, "step": 149500 }, { "epoch": 1.3206697398399831, "grad_norm": 5.034628391265869, "learning_rate": 8.391336965812581e-05, "loss": 3.0745, "step": 149550 }, { "epoch": 1.3211112877302673, "grad_norm": 3.1892762184143066, "learning_rate": 8.390315722498241e-05, "loss": 3.0208, "step": 149600 }, { "epoch": 1.3215528356205515, "grad_norm": 1.5829938650131226, "learning_rate": 8.389294217310759e-05, "loss": 3.2962, "step": 149650 }, { "epoch": 1.3219943835108356, "grad_norm": 3.269179105758667, "learning_rate": 8.38827245032904e-05, "loss": 3.3731, "step": 149700 }, { "epoch": 1.3224359314011198, "grad_norm": 2.511181116104126, "learning_rate": 8.387250421632005e-05, "loss": 3.2497, "step": 149750 }, { "epoch": 1.322877479291404, "grad_norm": 1.1484040021896362, "learning_rate": 8.386228131298597e-05, "loss": 3.205, "step": 149800 }, { "epoch": 1.3233190271816881, "grad_norm": 0.9007138609886169, "learning_rate": 8.385205579407782e-05, "loss": 2.6462, "step": 149850 }, { "epoch": 1.3237605750719723, "grad_norm": 1.4533042907714844, "learning_rate": 8.38418276603854e-05, "loss": 3.2513, "step": 149900 }, { "epoch": 1.3242021229622565, "grad_norm": 3.494771957397461, "learning_rate": 8.38315969126988e-05, "loss": 3.298, "step": 149950 }, { "epoch": 1.3246436708525406, "grad_norm": 3.4420292377471924, "learning_rate": 8.382136355180819e-05, "loss": 3.4275, "step": 150000 }, { "epoch": 1.3246436708525406, "eval_asr_loss": 0.9286041771558345, "eval_loss": 2.9267418384552, "eval_runtime": 20.4668, "eval_samples_per_second": 37.524, "eval_steps_per_second": 9.381, "eval_tts_loss": 5.983196550111349, "step": 150000 }, { "epoch": 
1.3250852187428248, "grad_norm": 4.632486820220947, "learning_rate": 8.381112757850405e-05, "loss": 2.9501, "step": 150050 }, { "epoch": 1.325526766633109, "grad_norm": 1.6033202409744263, "learning_rate": 8.380088899357701e-05, "loss": 2.4565, "step": 150100 }, { "epoch": 1.3259683145233931, "grad_norm": 2.7419354915618896, "learning_rate": 8.37906477978179e-05, "loss": 3.2429, "step": 150150 }, { "epoch": 1.3264098624136773, "grad_norm": 1.273281216621399, "learning_rate": 8.378040399201783e-05, "loss": 3.1723, "step": 150200 }, { "epoch": 1.3268514103039615, "grad_norm": 4.021177291870117, "learning_rate": 8.377015757696797e-05, "loss": 2.9459, "step": 150250 }, { "epoch": 1.3272929581942456, "grad_norm": 2.726773500442505, "learning_rate": 8.375990855345981e-05, "loss": 3.2983, "step": 150300 }, { "epoch": 1.3277345060845298, "grad_norm": 1.6660208702087402, "learning_rate": 8.374965692228499e-05, "loss": 2.8783, "step": 150350 }, { "epoch": 1.328176053974814, "grad_norm": 12.518718719482422, "learning_rate": 8.373940268423535e-05, "loss": 3.518, "step": 150400 }, { "epoch": 1.3286176018650984, "grad_norm": 3.1526901721954346, "learning_rate": 8.372914584010297e-05, "loss": 3.4692, "step": 150450 }, { "epoch": 1.3290591497553825, "grad_norm": 0.5204312801361084, "learning_rate": 8.371888639068008e-05, "loss": 3.257, "step": 150500 }, { "epoch": 1.3295006976456667, "grad_norm": 3.0339365005493164, "learning_rate": 8.370862433675915e-05, "loss": 3.2844, "step": 150550 }, { "epoch": 1.3299422455359509, "grad_norm": 1.2331526279449463, "learning_rate": 8.369835967913282e-05, "loss": 3.2728, "step": 150600 }, { "epoch": 1.330383793426235, "grad_norm": 4.784111022949219, "learning_rate": 8.368809241859397e-05, "loss": 3.0626, "step": 150650 }, { "epoch": 1.3308253413165192, "grad_norm": 0.7437164783477783, "learning_rate": 8.367782255593564e-05, "loss": 3.2591, "step": 150700 }, { "epoch": 1.3312668892068034, "grad_norm": 1.0057419538497925, "learning_rate": 
8.366755009195111e-05, "loss": 3.4121, "step": 150750 }, { "epoch": 1.3317084370970875, "grad_norm": 1.6400574445724487, "learning_rate": 8.365727502743381e-05, "loss": 3.0203, "step": 150800 }, { "epoch": 1.3321499849873717, "grad_norm": 2.0449421405792236, "learning_rate": 8.364699736317743e-05, "loss": 3.4384, "step": 150850 }, { "epoch": 1.3325915328776559, "grad_norm": 1.9299911260604858, "learning_rate": 8.363671709997582e-05, "loss": 3.2457, "step": 150900 }, { "epoch": 1.33303308076794, "grad_norm": 2.1701300144195557, "learning_rate": 8.362643423862305e-05, "loss": 3.1797, "step": 150950 }, { "epoch": 1.3334746286582242, "grad_norm": 1.6506251096725464, "learning_rate": 8.361614877991338e-05, "loss": 2.9922, "step": 151000 }, { "epoch": 1.3339161765485086, "grad_norm": 1.9627467393875122, "learning_rate": 8.360586072464126e-05, "loss": 3.407, "step": 151050 }, { "epoch": 1.3343577244387927, "grad_norm": 2.9375967979431152, "learning_rate": 8.359557007360136e-05, "loss": 3.3929, "step": 151100 }, { "epoch": 1.334799272329077, "grad_norm": 2.7383005619049072, "learning_rate": 8.358527682758858e-05, "loss": 3.443, "step": 151150 }, { "epoch": 1.335240820219361, "grad_norm": 1.5693986415863037, "learning_rate": 8.357498098739793e-05, "loss": 3.4043, "step": 151200 }, { "epoch": 1.3356823681096452, "grad_norm": 4.640061855316162, "learning_rate": 8.356468255382474e-05, "loss": 3.5593, "step": 151250 }, { "epoch": 1.3361239159999294, "grad_norm": 2.2127010822296143, "learning_rate": 8.355438152766442e-05, "loss": 3.7348, "step": 151300 }, { "epoch": 1.3365654638902136, "grad_norm": 2.803560972213745, "learning_rate": 8.354407790971268e-05, "loss": 3.3592, "step": 151350 }, { "epoch": 1.3370070117804977, "grad_norm": 1.9920105934143066, "learning_rate": 8.353377170076536e-05, "loss": 3.2962, "step": 151400 }, { "epoch": 1.337448559670782, "grad_norm": 1.280686378479004, "learning_rate": 8.352346290161852e-05, "loss": 3.347, "step": 151450 }, { "epoch": 
1.337890107561066, "grad_norm": 2.0447115898132324, "learning_rate": 8.351315151306845e-05, "loss": 3.2339, "step": 151500 }, { "epoch": 1.3383316554513502, "grad_norm": 2.681647777557373, "learning_rate": 8.350283753591161e-05, "loss": 3.2523, "step": 151550 }, { "epoch": 1.3387732033416344, "grad_norm": 4.001578330993652, "learning_rate": 8.349252097094467e-05, "loss": 3.0276, "step": 151600 }, { "epoch": 1.3392147512319186, "grad_norm": 1.5295051336288452, "learning_rate": 8.348220181896449e-05, "loss": 3.6369, "step": 151650 }, { "epoch": 1.3396562991222027, "grad_norm": 3.501155138015747, "learning_rate": 8.347208654087182e-05, "loss": 3.1035, "step": 151700 }, { "epoch": 1.340097847012487, "grad_norm": 3.054642915725708, "learning_rate": 8.346176226895712e-05, "loss": 3.1906, "step": 151750 }, { "epoch": 1.340539394902771, "grad_norm": 2.961508274078369, "learning_rate": 8.345143541240505e-05, "loss": 2.7505, "step": 151800 }, { "epoch": 1.3409809427930552, "grad_norm": 2.213796377182007, "learning_rate": 8.344110597201324e-05, "loss": 3.4039, "step": 151850 }, { "epoch": 1.3414224906833394, "grad_norm": 3.433262825012207, "learning_rate": 8.343077394857957e-05, "loss": 3.3918, "step": 151900 }, { "epoch": 1.3418640385736236, "grad_norm": 2.9317116737365723, "learning_rate": 8.34204393429021e-05, "loss": 3.7814, "step": 151950 }, { "epoch": 1.3423055864639077, "grad_norm": 1.5801756381988525, "learning_rate": 8.341010215577909e-05, "loss": 3.4844, "step": 152000 }, { "epoch": 1.3427471343541921, "grad_norm": 4.749630451202393, "learning_rate": 8.3399762388009e-05, "loss": 3.5081, "step": 152050 }, { "epoch": 1.3431886822444763, "grad_norm": 1.1885944604873657, "learning_rate": 8.338942004039047e-05, "loss": 3.2524, "step": 152100 }, { "epoch": 1.3436302301347605, "grad_norm": 2.543466567993164, "learning_rate": 8.33790751137224e-05, "loss": 3.0227, "step": 152150 }, { "epoch": 1.3440717780250446, "grad_norm": 1.6229182481765747, "learning_rate": 
8.33687276088038e-05, "loss": 3.2959, "step": 152200 }, { "epoch": 1.3445133259153288, "grad_norm": 1.94907808303833, "learning_rate": 8.335837752643397e-05, "loss": 2.9457, "step": 152250 }, { "epoch": 1.344954873805613, "grad_norm": 1.4191594123840332, "learning_rate": 8.334802486741233e-05, "loss": 3.6033, "step": 152300 }, { "epoch": 1.3453964216958971, "grad_norm": 2.68353271484375, "learning_rate": 8.333766963253858e-05, "loss": 2.968, "step": 152350 }, { "epoch": 1.3458379695861813, "grad_norm": 3.616288185119629, "learning_rate": 8.332731182261251e-05, "loss": 2.9924, "step": 152400 }, { "epoch": 1.3462795174764655, "grad_norm": 1.501586675643921, "learning_rate": 8.331695143843424e-05, "loss": 3.0411, "step": 152450 }, { "epoch": 1.3467210653667496, "grad_norm": 2.923124074935913, "learning_rate": 8.330658848080397e-05, "loss": 3.5488, "step": 152500 }, { "epoch": 1.3471626132570338, "grad_norm": 2.4280457496643066, "learning_rate": 8.329622295052218e-05, "loss": 3.202, "step": 152550 }, { "epoch": 1.347604161147318, "grad_norm": 1.1072304248809814, "learning_rate": 8.328585484838952e-05, "loss": 2.9447, "step": 152600 }, { "epoch": 1.3480457090376023, "grad_norm": 2.937720775604248, "learning_rate": 8.327548417520681e-05, "loss": 3.2629, "step": 152650 }, { "epoch": 1.3484872569278865, "grad_norm": 1.4031856060028076, "learning_rate": 8.326511093177511e-05, "loss": 3.7229, "step": 152700 }, { "epoch": 1.3489288048181707, "grad_norm": 1.5467109680175781, "learning_rate": 8.325473511889569e-05, "loss": 3.2737, "step": 152750 }, { "epoch": 1.3493703527084548, "grad_norm": 1.7189159393310547, "learning_rate": 8.324435673736997e-05, "loss": 3.3052, "step": 152800 }, { "epoch": 1.349811900598739, "grad_norm": 1.80332612991333, "learning_rate": 8.323397578799959e-05, "loss": 3.8028, "step": 152850 }, { "epoch": 1.3502534484890232, "grad_norm": 3.8326416015625, "learning_rate": 8.32235922715864e-05, "loss": 2.936, "step": 152900 }, { "epoch": 1.3506949963793073, 
"grad_norm": 2.770670175552368, "learning_rate": 8.321320618893244e-05, "loss": 2.9054, "step": 152950 }, { "epoch": 1.3511365442695915, "grad_norm": 2.175163507461548, "learning_rate": 8.320281754083994e-05, "loss": 3.6902, "step": 153000 }, { "epoch": 1.3511365442695915, "eval_asr_loss": 0.9108942616081308, "eval_loss": 2.9208271503448486, "eval_runtime": 20.3468, "eval_samples_per_second": 37.745, "eval_steps_per_second": 9.436, "eval_tts_loss": 5.98267045013614, "step": 153000 }, { "epoch": 1.3515780921598757, "grad_norm": 2.5464835166931152, "learning_rate": 8.319242632811132e-05, "loss": 3.3609, "step": 153050 }, { "epoch": 1.3520196400501598, "grad_norm": 2.2353081703186035, "learning_rate": 8.318203255154926e-05, "loss": 3.2348, "step": 153100 }, { "epoch": 1.352461187940444, "grad_norm": 1.6320254802703857, "learning_rate": 8.317163621195654e-05, "loss": 3.2052, "step": 153150 }, { "epoch": 1.3529027358307282, "grad_norm": 2.5109684467315674, "learning_rate": 8.316123731013621e-05, "loss": 3.5582, "step": 153200 }, { "epoch": 1.3533442837210123, "grad_norm": 1.6748558282852173, "learning_rate": 8.31508358468915e-05, "loss": 2.6611, "step": 153250 }, { "epoch": 1.3537858316112965, "grad_norm": 0.912735641002655, "learning_rate": 8.314043182302583e-05, "loss": 3.4849, "step": 153300 }, { "epoch": 1.3542273795015807, "grad_norm": 1.289677381515503, "learning_rate": 8.313002523934284e-05, "loss": 2.6686, "step": 153350 }, { "epoch": 1.3546689273918648, "grad_norm": 2.805980920791626, "learning_rate": 8.311961609664632e-05, "loss": 3.393, "step": 153400 }, { "epoch": 1.355110475282149, "grad_norm": 2.8691484928131104, "learning_rate": 8.310920439574029e-05, "loss": 2.8758, "step": 153450 }, { "epoch": 1.3555520231724332, "grad_norm": 2.143998861312866, "learning_rate": 8.309879013742898e-05, "loss": 3.4699, "step": 153500 }, { "epoch": 1.3559935710627173, "grad_norm": 1.3978588581085205, "learning_rate": 8.308837332251682e-05, "loss": 3.5442, "step": 153550 }, 
{ "epoch": 1.3564351189530015, "grad_norm": 2.2007105350494385, "learning_rate": 8.30779539518084e-05, "loss": 2.9063, "step": 153600 }, { "epoch": 1.356876666843286, "grad_norm": 2.752504348754883, "learning_rate": 8.306753202610853e-05, "loss": 3.4736, "step": 153650 }, { "epoch": 1.35731821473357, "grad_norm": 3.7296342849731445, "learning_rate": 8.305710754622221e-05, "loss": 3.1397, "step": 153700 }, { "epoch": 1.3577597626238542, "grad_norm": 2.1975762844085693, "learning_rate": 8.304668051295464e-05, "loss": 3.0458, "step": 153750 }, { "epoch": 1.3582013105141384, "grad_norm": 1.018903136253357, "learning_rate": 8.303625092711125e-05, "loss": 3.3913, "step": 153800 }, { "epoch": 1.3586428584044226, "grad_norm": 1.9552948474884033, "learning_rate": 8.30258187894976e-05, "loss": 3.2726, "step": 153850 }, { "epoch": 1.3590844062947067, "grad_norm": 6.85836935043335, "learning_rate": 8.301538410091949e-05, "loss": 3.5054, "step": 153900 }, { "epoch": 1.359525954184991, "grad_norm": 2.9646308422088623, "learning_rate": 8.300494686218295e-05, "loss": 3.0811, "step": 153950 }, { "epoch": 1.359967502075275, "grad_norm": 3.7286722660064697, "learning_rate": 8.299450707409411e-05, "loss": 3.1636, "step": 154000 }, { "epoch": 1.3604090499655592, "grad_norm": 1.8345937728881836, "learning_rate": 8.298406473745939e-05, "loss": 3.6935, "step": 154050 }, { "epoch": 1.3608505978558434, "grad_norm": 2.4694504737854004, "learning_rate": 8.297361985308538e-05, "loss": 3.09, "step": 154100 }, { "epoch": 1.3612921457461276, "grad_norm": 1.6211429834365845, "learning_rate": 8.296317242177883e-05, "loss": 3.5075, "step": 154150 }, { "epoch": 1.3617336936364117, "grad_norm": 3.931645154953003, "learning_rate": 8.295272244434672e-05, "loss": 2.8835, "step": 154200 }, { "epoch": 1.3621752415266961, "grad_norm": 2.9920766353607178, "learning_rate": 8.294226992159626e-05, "loss": 3.3678, "step": 154250 }, { "epoch": 1.3626167894169803, "grad_norm": 0.9993804097175598, "learning_rate": 
8.293181485433477e-05, "loss": 3.493, "step": 154300 }, { "epoch": 1.3630583373072644, "grad_norm": 1.9843803644180298, "learning_rate": 8.292135724336984e-05, "loss": 3.2889, "step": 154350 }, { "epoch": 1.3634998851975486, "grad_norm": 2.757063865661621, "learning_rate": 8.291089708950925e-05, "loss": 3.6329, "step": 154400 }, { "epoch": 1.3639414330878328, "grad_norm": 2.337742805480957, "learning_rate": 8.290043439356091e-05, "loss": 2.9563, "step": 154450 }, { "epoch": 1.364382980978117, "grad_norm": 3.824697971343994, "learning_rate": 8.288996915633301e-05, "loss": 3.0373, "step": 154500 }, { "epoch": 1.364824528868401, "grad_norm": 3.1068763732910156, "learning_rate": 8.28795013786339e-05, "loss": 3.5153, "step": 154550 }, { "epoch": 1.3652660767586853, "grad_norm": 1.7246665954589844, "learning_rate": 8.286903106127211e-05, "loss": 3.1697, "step": 154600 }, { "epoch": 1.3657076246489694, "grad_norm": 1.9760079383850098, "learning_rate": 8.285855820505638e-05, "loss": 3.2777, "step": 154650 }, { "epoch": 1.3661491725392536, "grad_norm": 1.5915566682815552, "learning_rate": 8.284808281079567e-05, "loss": 3.2404, "step": 154700 }, { "epoch": 1.3665907204295378, "grad_norm": 3.363936424255371, "learning_rate": 8.283760487929911e-05, "loss": 3.1442, "step": 154750 }, { "epoch": 1.367032268319822, "grad_norm": 1.9960918426513672, "learning_rate": 8.282712441137603e-05, "loss": 3.5091, "step": 154800 }, { "epoch": 1.367473816210106, "grad_norm": 1.3129304647445679, "learning_rate": 8.281664140783594e-05, "loss": 2.9314, "step": 154850 }, { "epoch": 1.3679153641003903, "grad_norm": 2.3583755493164062, "learning_rate": 8.280615586948859e-05, "loss": 3.226, "step": 154900 }, { "epoch": 1.3683569119906744, "grad_norm": 3.2506699562072754, "learning_rate": 8.27956677971439e-05, "loss": 3.0299, "step": 154950 }, { "epoch": 1.3687984598809586, "grad_norm": 1.7959414720535278, "learning_rate": 8.278517719161196e-05, "loss": 3.1626, "step": 155000 }, { "epoch": 
1.3692400077712428, "grad_norm": 1.9496901035308838, "learning_rate": 8.277468405370308e-05, "loss": 3.2722, "step": 155050 }, { "epoch": 1.369681555661527, "grad_norm": 2.7062554359436035, "learning_rate": 8.27643983224214e-05, "loss": 3.1118, "step": 155100 }, { "epoch": 1.370123103551811, "grad_norm": 5.150788307189941, "learning_rate": 8.275390017279756e-05, "loss": 3.0469, "step": 155150 }, { "epoch": 1.3705646514420953, "grad_norm": 4.183962345123291, "learning_rate": 8.274339949321266e-05, "loss": 2.9026, "step": 155200 }, { "epoch": 1.3710061993323797, "grad_norm": 3.3120381832122803, "learning_rate": 8.273289628447782e-05, "loss": 2.9835, "step": 155250 }, { "epoch": 1.3714477472226638, "grad_norm": 5.01318883895874, "learning_rate": 8.272239054740431e-05, "loss": 3.1624, "step": 155300 }, { "epoch": 1.371889295112948, "grad_norm": 2.8328940868377686, "learning_rate": 8.27118822828036e-05, "loss": 3.1617, "step": 155350 }, { "epoch": 1.3723308430032322, "grad_norm": 1.2944576740264893, "learning_rate": 8.270137149148737e-05, "loss": 2.9411, "step": 155400 }, { "epoch": 1.3727723908935163, "grad_norm": 0.6006796956062317, "learning_rate": 8.269085817426751e-05, "loss": 3.0553, "step": 155450 }, { "epoch": 1.3732139387838005, "grad_norm": 2.5675642490386963, "learning_rate": 8.268034233195605e-05, "loss": 3.7048, "step": 155500 }, { "epoch": 1.3736554866740847, "grad_norm": 2.5069117546081543, "learning_rate": 8.266982396536527e-05, "loss": 2.8343, "step": 155550 }, { "epoch": 1.3740970345643688, "grad_norm": 2.1562747955322266, "learning_rate": 8.265930307530763e-05, "loss": 3.1942, "step": 155600 }, { "epoch": 1.374538582454653, "grad_norm": 2.492576837539673, "learning_rate": 8.264877966259577e-05, "loss": 3.0843, "step": 155650 }, { "epoch": 1.3749801303449372, "grad_norm": 3.6967947483062744, "learning_rate": 8.26382537280425e-05, "loss": 3.0236, "step": 155700 }, { "epoch": 1.3754216782352213, "grad_norm": 2.7098875045776367, "learning_rate": 
8.262772527246094e-05, "loss": 3.6515, "step": 155750 }, { "epoch": 1.3758632261255055, "grad_norm": 2.622032642364502, "learning_rate": 8.261719429666426e-05, "loss": 3.2831, "step": 155800 }, { "epoch": 1.3763047740157899, "grad_norm": 4.229182243347168, "learning_rate": 8.26066608014659e-05, "loss": 3.383, "step": 155850 }, { "epoch": 1.376746321906074, "grad_norm": 2.306593179702759, "learning_rate": 8.259612478767949e-05, "loss": 2.8599, "step": 155900 }, { "epoch": 1.3771878697963582, "grad_norm": 4.549513339996338, "learning_rate": 8.258579705141899e-05, "loss": 2.933, "step": 155950 }, { "epoch": 1.3776294176866424, "grad_norm": 5.794224262237549, "learning_rate": 8.257525605322936e-05, "loss": 3.1477, "step": 156000 }, { "epoch": 1.3776294176866424, "eval_asr_loss": 0.9160445507592706, "eval_loss": 2.932405471801758, "eval_runtime": 20.6052, "eval_samples_per_second": 37.272, "eval_steps_per_second": 9.318, "eval_tts_loss": 6.016129709398038, "step": 156000 }, { "epoch": 1.3780709655769265, "grad_norm": 1.7329176664352417, "learning_rate": 8.256471253887742e-05, "loss": 3.2825, "step": 156050 }, { "epoch": 1.3785125134672107, "grad_norm": 4.851804256439209, "learning_rate": 8.255416650917758e-05, "loss": 3.6591, "step": 156100 }, { "epoch": 1.3789540613574949, "grad_norm": 0.9085479378700256, "learning_rate": 8.254361796494443e-05, "loss": 2.9981, "step": 156150 }, { "epoch": 1.379395609247779, "grad_norm": 1.4775174856185913, "learning_rate": 8.253306690699277e-05, "loss": 3.4719, "step": 156200 }, { "epoch": 1.3798371571380632, "grad_norm": 2.0856218338012695, "learning_rate": 8.252251333613755e-05, "loss": 3.1563, "step": 156250 }, { "epoch": 1.3802787050283474, "grad_norm": 1.0467510223388672, "learning_rate": 8.251195725319394e-05, "loss": 3.5563, "step": 156300 }, { "epoch": 1.3807202529186315, "grad_norm": 1.6420974731445312, "learning_rate": 8.250139865897735e-05, "loss": 3.1661, "step": 156350 }, { "epoch": 1.3811618008089157, "grad_norm": 
1.7073570489883423, "learning_rate": 8.249083755430332e-05, "loss": 3.0233, "step": 156400 }, { "epoch": 1.3816033486991999, "grad_norm": 3.6677961349487305, "learning_rate": 8.248027393998758e-05, "loss": 3.0304, "step": 156450 }, { "epoch": 1.382044896589484, "grad_norm": 3.6211111545562744, "learning_rate": 8.246970781684611e-05, "loss": 3.0123, "step": 156500 }, { "epoch": 1.3824864444797682, "grad_norm": 2.3987412452697754, "learning_rate": 8.245913918569506e-05, "loss": 3.2113, "step": 156550 }, { "epoch": 1.3829279923700524, "grad_norm": 3.1175930500030518, "learning_rate": 8.244856804735076e-05, "loss": 3.2382, "step": 156600 }, { "epoch": 1.3833695402603365, "grad_norm": 1.2450615167617798, "learning_rate": 8.243799440262972e-05, "loss": 3.434, "step": 156650 }, { "epoch": 1.3838110881506207, "grad_norm": 2.2003071308135986, "learning_rate": 8.242741825234866e-05, "loss": 3.3624, "step": 156700 }, { "epoch": 1.3842526360409049, "grad_norm": 3.6320629119873047, "learning_rate": 8.241683959732453e-05, "loss": 3.0379, "step": 156750 }, { "epoch": 1.3846941839311893, "grad_norm": 2.458106756210327, "learning_rate": 8.240625843837444e-05, "loss": 3.3652, "step": 156800 }, { "epoch": 1.3851357318214734, "grad_norm": 2.492220878601074, "learning_rate": 8.239567477631569e-05, "loss": 3.3358, "step": 156850 }, { "epoch": 1.3855772797117576, "grad_norm": 1.127402424812317, "learning_rate": 8.238508861196574e-05, "loss": 3.317, "step": 156900 }, { "epoch": 1.3860188276020418, "grad_norm": 2.1057465076446533, "learning_rate": 8.237449994614234e-05, "loss": 3.3922, "step": 156950 }, { "epoch": 1.386460375492326, "grad_norm": 0.6556951999664307, "learning_rate": 8.236390877966332e-05, "loss": 3.4174, "step": 157000 }, { "epoch": 1.38690192338261, "grad_norm": 2.198021173477173, "learning_rate": 8.235331511334681e-05, "loss": 3.4776, "step": 157050 }, { "epoch": 1.3873434712728943, "grad_norm": 2.8309218883514404, "learning_rate": 8.234271894801105e-05, "loss": 2.996, 
"step": 157100 }, { "epoch": 1.3877850191631784, "grad_norm": 3.436307430267334, "learning_rate": 8.23321202844745e-05, "loss": 3.3207, "step": 157150 }, { "epoch": 1.3882265670534626, "grad_norm": 5.650524139404297, "learning_rate": 8.232151912355583e-05, "loss": 3.2116, "step": 157200 }, { "epoch": 1.3886681149437468, "grad_norm": 3.0345571041107178, "learning_rate": 8.231091546607388e-05, "loss": 3.2176, "step": 157250 }, { "epoch": 1.389109662834031, "grad_norm": 1.683851718902588, "learning_rate": 8.230030931284771e-05, "loss": 3.0684, "step": 157300 }, { "epoch": 1.389551210724315, "grad_norm": 3.6749675273895264, "learning_rate": 8.228970066469655e-05, "loss": 2.7936, "step": 157350 }, { "epoch": 1.3899927586145995, "grad_norm": 2.033708333969116, "learning_rate": 8.227908952243981e-05, "loss": 2.6264, "step": 157400 }, { "epoch": 1.3904343065048836, "grad_norm": 3.007284641265869, "learning_rate": 8.226847588689713e-05, "loss": 3.3704, "step": 157450 }, { "epoch": 1.3908758543951678, "grad_norm": 3.3445730209350586, "learning_rate": 8.22578597588883e-05, "loss": 2.9904, "step": 157500 }, { "epoch": 1.391317402285452, "grad_norm": 5.233058929443359, "learning_rate": 8.224724113923336e-05, "loss": 3.0608, "step": 157550 }, { "epoch": 1.3917589501757361, "grad_norm": 2.4576175212860107, "learning_rate": 8.223662002875249e-05, "loss": 3.3851, "step": 157600 }, { "epoch": 1.3922004980660203, "grad_norm": 3.7473318576812744, "learning_rate": 8.222599642826608e-05, "loss": 3.0497, "step": 157650 }, { "epoch": 1.3926420459563045, "grad_norm": 4.362736225128174, "learning_rate": 8.22153703385947e-05, "loss": 3.0953, "step": 157700 }, { "epoch": 1.3930835938465886, "grad_norm": 2.2473487854003906, "learning_rate": 8.220474176055917e-05, "loss": 3.354, "step": 157750 }, { "epoch": 1.3935251417368728, "grad_norm": 5.24487829208374, "learning_rate": 8.21941106949804e-05, "loss": 3.2924, "step": 157800 }, { "epoch": 1.393966689627157, "grad_norm": 5.974734783172607, 
"learning_rate": 8.218347714267958e-05, "loss": 2.666, "step": 157850 }, { "epoch": 1.3944082375174411, "grad_norm": 1.5689363479614258, "learning_rate": 8.217284110447807e-05, "loss": 3.3073, "step": 157900 }, { "epoch": 1.3948497854077253, "grad_norm": 2.4762063026428223, "learning_rate": 8.216220258119739e-05, "loss": 2.9431, "step": 157950 }, { "epoch": 1.3952913332980095, "grad_norm": 1.6257416009902954, "learning_rate": 8.21515615736593e-05, "loss": 3.1226, "step": 158000 }, { "epoch": 1.3957328811882936, "grad_norm": 1.930908203125, "learning_rate": 8.21409180826857e-05, "loss": 3.1131, "step": 158050 }, { "epoch": 1.3961744290785778, "grad_norm": 2.8949382305145264, "learning_rate": 8.213027210909874e-05, "loss": 3.1718, "step": 158100 }, { "epoch": 1.396615976968862, "grad_norm": 3.7047338485717773, "learning_rate": 8.21196236537207e-05, "loss": 3.3025, "step": 158150 }, { "epoch": 1.3970575248591461, "grad_norm": 3.0060231685638428, "learning_rate": 8.210897271737412e-05, "loss": 3.3839, "step": 158200 }, { "epoch": 1.3974990727494303, "grad_norm": 2.41214919090271, "learning_rate": 8.209831930088167e-05, "loss": 3.3249, "step": 158250 }, { "epoch": 1.3979406206397145, "grad_norm": 1.3485262393951416, "learning_rate": 8.208766340506623e-05, "loss": 3.095, "step": 158300 }, { "epoch": 1.3983821685299986, "grad_norm": 3.0010628700256348, "learning_rate": 8.207700503075089e-05, "loss": 3.0024, "step": 158350 }, { "epoch": 1.398823716420283, "grad_norm": 1.9099316596984863, "learning_rate": 8.206634417875891e-05, "loss": 3.6751, "step": 158400 }, { "epoch": 1.3992652643105672, "grad_norm": 1.0043919086456299, "learning_rate": 8.205568084991377e-05, "loss": 3.402, "step": 158450 }, { "epoch": 1.3997068122008514, "grad_norm": 0.9302772283554077, "learning_rate": 8.204501504503913e-05, "loss": 3.4862, "step": 158500 }, { "epoch": 1.4001483600911355, "grad_norm": 3.9256772994995117, "learning_rate": 8.203434676495876e-05, "loss": 3.1125, "step": 158550 }, { 
"epoch": 1.4005899079814197, "grad_norm": 1.6073594093322754, "learning_rate": 8.202367601049678e-05, "loss": 3.2281, "step": 158600 }, { "epoch": 1.4010314558717039, "grad_norm": 1.277282953262329, "learning_rate": 8.201300278247736e-05, "loss": 3.1331, "step": 158650 }, { "epoch": 1.401473003761988, "grad_norm": 2.780365467071533, "learning_rate": 8.200232708172495e-05, "loss": 3.6974, "step": 158700 }, { "epoch": 1.4019145516522722, "grad_norm": 3.2038660049438477, "learning_rate": 8.199164890906415e-05, "loss": 3.2937, "step": 158750 }, { "epoch": 1.4023560995425564, "grad_norm": 5.772561073303223, "learning_rate": 8.198096826531974e-05, "loss": 3.1028, "step": 158800 }, { "epoch": 1.4027976474328405, "grad_norm": 2.543661117553711, "learning_rate": 8.197028515131673e-05, "loss": 3.1588, "step": 158850 }, { "epoch": 1.4032391953231247, "grad_norm": 2.7590420246124268, "learning_rate": 8.195959956788031e-05, "loss": 3.1061, "step": 158900 }, { "epoch": 1.4036807432134089, "grad_norm": 2.1173219680786133, "learning_rate": 8.194891151583579e-05, "loss": 3.132, "step": 158950 }, { "epoch": 1.4041222911036932, "grad_norm": 1.821299433708191, "learning_rate": 8.19382209960088e-05, "loss": 3.4133, "step": 159000 }, { "epoch": 1.4041222911036932, "eval_asr_loss": 0.9242424718126228, "eval_loss": 2.9176743030548096, "eval_runtime": 20.4126, "eval_samples_per_second": 37.624, "eval_steps_per_second": 9.406, "eval_tts_loss": 5.999804931002174, "step": 159000 }, { "epoch": 1.4045638389939774, "grad_norm": 2.2017438411712646, "learning_rate": 8.192752800922508e-05, "loss": 3.2473, "step": 159050 }, { "epoch": 1.4050053868842616, "grad_norm": 5.148829460144043, "learning_rate": 8.191683255631053e-05, "loss": 3.1951, "step": 159100 }, { "epoch": 1.4054469347745457, "grad_norm": 2.3548662662506104, "learning_rate": 8.190613463809134e-05, "loss": 3.5011, "step": 159150 }, { "epoch": 1.40588848266483, "grad_norm": 3.541984796524048, "learning_rate": 8.189543425539378e-05, 
"loss": 2.8037, "step": 159200 }, { "epoch": 1.406330030555114, "grad_norm": 2.6860177516937256, "learning_rate": 8.18847314090444e-05, "loss": 3.3826, "step": 159250 }, { "epoch": 1.4067715784453982, "grad_norm": 2.60087251663208, "learning_rate": 8.187402609986989e-05, "loss": 3.7379, "step": 159300 }, { "epoch": 1.4072131263356824, "grad_norm": 4.26237154006958, "learning_rate": 8.186331832869715e-05, "loss": 2.9288, "step": 159350 }, { "epoch": 1.4076546742259666, "grad_norm": 2.6659810543060303, "learning_rate": 8.185260809635326e-05, "loss": 3.4246, "step": 159400 }, { "epoch": 1.4080962221162507, "grad_norm": 3.74465012550354, "learning_rate": 8.184189540366551e-05, "loss": 2.8438, "step": 159450 }, { "epoch": 1.408537770006535, "grad_norm": 4.397085189819336, "learning_rate": 8.183118025146134e-05, "loss": 3.489, "step": 159500 }, { "epoch": 1.408979317896819, "grad_norm": 4.9571452140808105, "learning_rate": 8.182046264056842e-05, "loss": 3.2969, "step": 159550 }, { "epoch": 1.4094208657871032, "grad_norm": 1.2717839479446411, "learning_rate": 8.180974257181457e-05, "loss": 3.4956, "step": 159600 }, { "epoch": 1.4098624136773874, "grad_norm": 2.1157987117767334, "learning_rate": 8.179902004602787e-05, "loss": 3.4988, "step": 159650 }, { "epoch": 1.4103039615676716, "grad_norm": 2.7032673358917236, "learning_rate": 8.178829506403651e-05, "loss": 3.6502, "step": 159700 }, { "epoch": 1.4107455094579557, "grad_norm": 1.5434041023254395, "learning_rate": 8.177756762666892e-05, "loss": 3.6715, "step": 159750 }, { "epoch": 1.41118705734824, "grad_norm": 2.1543235778808594, "learning_rate": 8.17668377347537e-05, "loss": 3.4379, "step": 159800 }, { "epoch": 1.411628605238524, "grad_norm": 2.2687978744506836, "learning_rate": 8.175610538911961e-05, "loss": 3.2811, "step": 159850 }, { "epoch": 1.4120701531288082, "grad_norm": 3.682598829269409, "learning_rate": 8.174537059059571e-05, "loss": 3.3635, "step": 159900 }, { "epoch": 1.4125117010190924, "grad_norm": 
3.4800243377685547, "learning_rate": 8.173463334001109e-05, "loss": 3.2212, "step": 159950 }, { "epoch": 1.4129532489093768, "grad_norm": 2.2715086936950684, "learning_rate": 8.172389363819514e-05, "loss": 3.5366, "step": 160000 }, { "epoch": 1.413394796799661, "grad_norm": 4.865034103393555, "learning_rate": 8.171315148597744e-05, "loss": 3.4287, "step": 160050 }, { "epoch": 1.4138363446899451, "grad_norm": 1.0195260047912598, "learning_rate": 8.17024068841877e-05, "loss": 3.4387, "step": 160100 }, { "epoch": 1.4142778925802293, "grad_norm": 0.7502059936523438, "learning_rate": 8.169165983365588e-05, "loss": 2.8933, "step": 160150 }, { "epoch": 1.4147194404705135, "grad_norm": 1.9067836999893188, "learning_rate": 8.168091033521204e-05, "loss": 3.5911, "step": 160200 }, { "epoch": 1.4151609883607976, "grad_norm": 1.9587763547897339, "learning_rate": 8.167015838968653e-05, "loss": 3.1981, "step": 160250 }, { "epoch": 1.4156025362510818, "grad_norm": 2.4803414344787598, "learning_rate": 8.165940399790986e-05, "loss": 2.9892, "step": 160300 }, { "epoch": 1.416044084141366, "grad_norm": 1.6044929027557373, "learning_rate": 8.164864716071266e-05, "loss": 3.2477, "step": 160350 }, { "epoch": 1.4164856320316501, "grad_norm": 2.8173727989196777, "learning_rate": 8.163788787892586e-05, "loss": 2.7043, "step": 160400 }, { "epoch": 1.4169271799219343, "grad_norm": 2.8519928455352783, "learning_rate": 8.16271261533805e-05, "loss": 3.2307, "step": 160450 }, { "epoch": 1.4173687278122185, "grad_norm": 2.7776854038238525, "learning_rate": 8.161636198490784e-05, "loss": 3.343, "step": 160500 }, { "epoch": 1.4178102757025026, "grad_norm": 2.121873378753662, "learning_rate": 8.160559537433932e-05, "loss": 3.2525, "step": 160550 }, { "epoch": 1.418251823592787, "grad_norm": 2.0176658630371094, "learning_rate": 8.159482632250655e-05, "loss": 3.5169, "step": 160600 }, { "epoch": 1.4186933714830712, "grad_norm": 2.57165265083313, "learning_rate": 8.158405483024138e-05, "loss": 3.3809, 
"step": 160650 }, { "epoch": 1.4191349193733553, "grad_norm": 0.6696668267250061, "learning_rate": 8.15732808983758e-05, "loss": 2.9887, "step": 160700 }, { "epoch": 1.4195764672636395, "grad_norm": 2.2909727096557617, "learning_rate": 8.156250452774198e-05, "loss": 3.1004, "step": 160750 }, { "epoch": 1.4200180151539237, "grad_norm": 8.691178321838379, "learning_rate": 8.155172571917233e-05, "loss": 2.8258, "step": 160800 }, { "epoch": 1.4204595630442078, "grad_norm": 3.109480381011963, "learning_rate": 8.154094447349943e-05, "loss": 3.3217, "step": 160850 }, { "epoch": 1.420901110934492, "grad_norm": 4.277402400970459, "learning_rate": 8.153016079155604e-05, "loss": 3.426, "step": 160900 }, { "epoch": 1.4213426588247762, "grad_norm": 1.419296145439148, "learning_rate": 8.151937467417508e-05, "loss": 3.1784, "step": 160950 }, { "epoch": 1.4217842067150603, "grad_norm": 1.5565255880355835, "learning_rate": 8.150858612218972e-05, "loss": 3.0897, "step": 161000 }, { "epoch": 1.4222257546053445, "grad_norm": 3.24021315574646, "learning_rate": 8.149779513643325e-05, "loss": 3.2465, "step": 161050 }, { "epoch": 1.4226673024956287, "grad_norm": 5.220244407653809, "learning_rate": 8.148721760995049e-05, "loss": 3.3344, "step": 161100 }, { "epoch": 1.4231088503859128, "grad_norm": 1.918340802192688, "learning_rate": 8.147642180778646e-05, "loss": 3.5066, "step": 161150 }, { "epoch": 1.423550398276197, "grad_norm": 3.6554203033447266, "learning_rate": 8.146562357433577e-05, "loss": 3.2737, "step": 161200 }, { "epoch": 1.4239919461664812, "grad_norm": 1.7127411365509033, "learning_rate": 8.145482291043247e-05, "loss": 3.3952, "step": 161250 }, { "epoch": 1.4244334940567653, "grad_norm": 1.1752212047576904, "learning_rate": 8.144401981691085e-05, "loss": 2.9625, "step": 161300 }, { "epoch": 1.4248750419470495, "grad_norm": 1.228081464767456, "learning_rate": 8.143321429460533e-05, "loss": 3.2811, "step": 161350 }, { "epoch": 1.4253165898373337, "grad_norm": 
3.1649067401885986, "learning_rate": 8.142240634435052e-05, "loss": 3.2813, "step": 161400 }, { "epoch": 1.4257581377276178, "grad_norm": 2.143840789794922, "learning_rate": 8.141159596698131e-05, "loss": 3.4526, "step": 161450 }, { "epoch": 1.426199685617902, "grad_norm": 4.533790588378906, "learning_rate": 8.140078316333266e-05, "loss": 3.4879, "step": 161500 }, { "epoch": 1.4266412335081862, "grad_norm": 3.0826544761657715, "learning_rate": 8.138996793423979e-05, "loss": 3.1003, "step": 161550 }, { "epoch": 1.4270827813984706, "grad_norm": 8.362160682678223, "learning_rate": 8.137915028053806e-05, "loss": 2.9193, "step": 161600 }, { "epoch": 1.4275243292887547, "grad_norm": 3.49751353263855, "learning_rate": 8.136854662836015e-05, "loss": 3.1116, "step": 161650 }, { "epoch": 1.427965877179039, "grad_norm": 1.2915788888931274, "learning_rate": 8.135772417639821e-05, "loss": 3.0923, "step": 161700 }, { "epoch": 1.428407425069323, "grad_norm": 2.0796115398406982, "learning_rate": 8.134689930231797e-05, "loss": 3.4672, "step": 161750 }, { "epoch": 1.4288489729596072, "grad_norm": 1.242505431175232, "learning_rate": 8.133607200695558e-05, "loss": 3.2748, "step": 161800 }, { "epoch": 1.4292905208498914, "grad_norm": 5.859312057495117, "learning_rate": 8.132524229114736e-05, "loss": 2.9677, "step": 161850 }, { "epoch": 1.4297320687401756, "grad_norm": 2.893573522567749, "learning_rate": 8.131441015572977e-05, "loss": 3.486, "step": 161900 }, { "epoch": 1.4301736166304597, "grad_norm": 2.6132123470306396, "learning_rate": 8.130357560153958e-05, "loss": 3.5871, "step": 161950 }, { "epoch": 1.4306151645207439, "grad_norm": 2.7765181064605713, "learning_rate": 8.129273862941361e-05, "loss": 2.9948, "step": 162000 }, { "epoch": 1.4306151645207439, "eval_asr_loss": 0.9175770946416757, "eval_loss": 2.9294345378875732, "eval_runtime": 20.6294, "eval_samples_per_second": 37.228, "eval_steps_per_second": 9.307, "eval_tts_loss": 6.006233162385569, "step": 162000 }, { "epoch": 
1.431056712411028, "grad_norm": 2.137802839279175, "learning_rate": 8.128189924018891e-05, "loss": 3.1945, "step": 162050 }, { "epoch": 1.4314982603013122, "grad_norm": 3.257875442504883, "learning_rate": 8.127105743470277e-05, "loss": 3.7324, "step": 162100 }, { "epoch": 1.4319398081915964, "grad_norm": 4.234621047973633, "learning_rate": 8.126021321379263e-05, "loss": 3.1456, "step": 162150 }, { "epoch": 1.4323813560818808, "grad_norm": 2.000067710876465, "learning_rate": 8.124936657829607e-05, "loss": 3.0195, "step": 162200 }, { "epoch": 1.432822903972165, "grad_norm": 0.7076943516731262, "learning_rate": 8.123851752905096e-05, "loss": 3.2913, "step": 162250 }, { "epoch": 1.433264451862449, "grad_norm": 1.7147762775421143, "learning_rate": 8.122766606689523e-05, "loss": 3.3676, "step": 162300 }, { "epoch": 1.4337059997527333, "grad_norm": 1.8127288818359375, "learning_rate": 8.121681219266713e-05, "loss": 3.4248, "step": 162350 }, { "epoch": 1.4341475476430174, "grad_norm": 2.52752685546875, "learning_rate": 8.120595590720499e-05, "loss": 3.3246, "step": 162400 }, { "epoch": 1.4345890955333016, "grad_norm": 2.496375322341919, "learning_rate": 8.119509721134735e-05, "loss": 3.0343, "step": 162450 }, { "epoch": 1.4350306434235858, "grad_norm": 0.9835373163223267, "learning_rate": 8.118423610593301e-05, "loss": 3.2281, "step": 162500 }, { "epoch": 1.43547219131387, "grad_norm": 3.728062629699707, "learning_rate": 8.117337259180084e-05, "loss": 3.1614, "step": 162550 }, { "epoch": 1.435913739204154, "grad_norm": 4.0812907218933105, "learning_rate": 8.116250666979e-05, "loss": 3.3182, "step": 162600 }, { "epoch": 1.4363552870944383, "grad_norm": 1.9645757675170898, "learning_rate": 8.115163834073977e-05, "loss": 3.0928, "step": 162650 }, { "epoch": 1.4367968349847224, "grad_norm": 2.8638205528259277, "learning_rate": 8.114076760548963e-05, "loss": 3.1087, "step": 162700 }, { "epoch": 1.4372383828750066, "grad_norm": 4.352683067321777, "learning_rate": 
8.112989446487927e-05, "loss": 3.3871, "step": 162750 }, { "epoch": 1.4376799307652908, "grad_norm": 3.6510112285614014, "learning_rate": 8.11190189197485e-05, "loss": 2.9083, "step": 162800 }, { "epoch": 1.438121478655575, "grad_norm": 1.543848991394043, "learning_rate": 8.110814097093743e-05, "loss": 3.3145, "step": 162850 }, { "epoch": 1.438563026545859, "grad_norm": 1.975798487663269, "learning_rate": 8.109726061928627e-05, "loss": 3.253, "step": 162900 }, { "epoch": 1.4390045744361433, "grad_norm": 3.0668487548828125, "learning_rate": 8.10863778656354e-05, "loss": 3.3423, "step": 162950 }, { "epoch": 1.4394461223264274, "grad_norm": 0.9884811639785767, "learning_rate": 8.107549271082545e-05, "loss": 2.8906, "step": 163000 }, { "epoch": 1.4398876702167116, "grad_norm": 2.0988800525665283, "learning_rate": 8.106460515569721e-05, "loss": 3.0113, "step": 163050 }, { "epoch": 1.4403292181069958, "grad_norm": 2.1560781002044678, "learning_rate": 8.105371520109163e-05, "loss": 3.366, "step": 163100 }, { "epoch": 1.44077076599728, "grad_norm": 3.452460765838623, "learning_rate": 8.104282284784989e-05, "loss": 3.4599, "step": 163150 }, { "epoch": 1.4412123138875643, "grad_norm": 2.827583074569702, "learning_rate": 8.10319280968133e-05, "loss": 3.0957, "step": 163200 }, { "epoch": 1.4416538617778485, "grad_norm": 5.022325038909912, "learning_rate": 8.102103094882343e-05, "loss": 2.9519, "step": 163250 }, { "epoch": 1.4420954096681327, "grad_norm": 2.3892874717712402, "learning_rate": 8.101013140472195e-05, "loss": 3.0009, "step": 163300 }, { "epoch": 1.4425369575584168, "grad_norm": 2.8516552448272705, "learning_rate": 8.09992294653508e-05, "loss": 3.0863, "step": 163350 }, { "epoch": 1.442978505448701, "grad_norm": 1.9601846933364868, "learning_rate": 8.098832513155203e-05, "loss": 3.2776, "step": 163400 }, { "epoch": 1.4434200533389852, "grad_norm": 2.186326503753662, "learning_rate": 8.097741840416791e-05, "loss": 3.3627, "step": 163450 }, { "epoch": 
1.4438616012292693, "grad_norm": 4.046080589294434, "learning_rate": 8.096650928404092e-05, "loss": 2.9213, "step": 163500 }, { "epoch": 1.4443031491195535, "grad_norm": 2.9167070388793945, "learning_rate": 8.095559777201365e-05, "loss": 3.2593, "step": 163550 }, { "epoch": 1.4447446970098377, "grad_norm": 1.3748699426651, "learning_rate": 8.094468386892896e-05, "loss": 3.3712, "step": 163600 }, { "epoch": 1.4451862449001218, "grad_norm": 2.320512056350708, "learning_rate": 8.093376757562985e-05, "loss": 3.0272, "step": 163650 }, { "epoch": 1.445627792790406, "grad_norm": 7.251769542694092, "learning_rate": 8.092284889295949e-05, "loss": 3.4075, "step": 163700 }, { "epoch": 1.4460693406806902, "grad_norm": 2.5889413356781006, "learning_rate": 8.091192782176129e-05, "loss": 2.8281, "step": 163750 }, { "epoch": 1.4465108885709745, "grad_norm": 2.878997325897217, "learning_rate": 8.090100436287877e-05, "loss": 3.5972, "step": 163800 }, { "epoch": 1.4469524364612587, "grad_norm": 1.8390371799468994, "learning_rate": 8.089007851715571e-05, "loss": 3.3453, "step": 163850 }, { "epoch": 1.4473939843515429, "grad_norm": 2.4774179458618164, "learning_rate": 8.087915028543603e-05, "loss": 2.9448, "step": 163900 }, { "epoch": 1.447835532241827, "grad_norm": 2.5568761825561523, "learning_rate": 8.086821966856381e-05, "loss": 2.6824, "step": 163950 }, { "epoch": 1.4482770801321112, "grad_norm": 2.712879180908203, "learning_rate": 8.085728666738341e-05, "loss": 3.2722, "step": 164000 }, { "epoch": 1.4487186280223954, "grad_norm": 2.8086280822753906, "learning_rate": 8.084635128273926e-05, "loss": 3.4506, "step": 164050 }, { "epoch": 1.4491601759126795, "grad_norm": 2.9732656478881836, "learning_rate": 8.083541351547603e-05, "loss": 3.462, "step": 164100 }, { "epoch": 1.4496017238029637, "grad_norm": 3.0905792713165283, "learning_rate": 8.082447336643861e-05, "loss": 2.8562, "step": 164150 }, { "epoch": 1.4500432716932479, "grad_norm": 2.6996688842773438, "learning_rate": 
8.0813530836472e-05, "loss": 3.3813, "step": 164200 }, { "epoch": 1.450484819583532, "grad_norm": 1.9003500938415527, "learning_rate": 8.08025859264214e-05, "loss": 3.3731, "step": 164250 }, { "epoch": 1.4509263674738162, "grad_norm": 1.5996307134628296, "learning_rate": 8.079163863713227e-05, "loss": 3.1851, "step": 164300 }, { "epoch": 1.4513679153641004, "grad_norm": 1.505954384803772, "learning_rate": 8.078068896945013e-05, "loss": 3.2188, "step": 164350 }, { "epoch": 1.4518094632543845, "grad_norm": 3.6162900924682617, "learning_rate": 8.076973692422081e-05, "loss": 3.2365, "step": 164400 }, { "epoch": 1.4522510111446687, "grad_norm": 2.8672714233398438, "learning_rate": 8.07587825022902e-05, "loss": 3.571, "step": 164450 }, { "epoch": 1.4526925590349529, "grad_norm": 1.8721016645431519, "learning_rate": 8.07478257045045e-05, "loss": 2.9197, "step": 164500 }, { "epoch": 1.453134106925237, "grad_norm": 3.9616005420684814, "learning_rate": 8.073686653170997e-05, "loss": 3.3142, "step": 164550 }, { "epoch": 1.4535756548155212, "grad_norm": 1.1517298221588135, "learning_rate": 8.072590498475316e-05, "loss": 3.1608, "step": 164600 }, { "epoch": 1.4540172027058054, "grad_norm": 3.2245967388153076, "learning_rate": 8.071494106448074e-05, "loss": 3.3427, "step": 164650 }, { "epoch": 1.4544587505960895, "grad_norm": 1.755993366241455, "learning_rate": 8.070397477173957e-05, "loss": 3.0496, "step": 164700 }, { "epoch": 1.4549002984863737, "grad_norm": 3.7342405319213867, "learning_rate": 8.06930061073767e-05, "loss": 3.2357, "step": 164750 }, { "epoch": 1.455341846376658, "grad_norm": 2.7675912380218506, "learning_rate": 8.06820350722394e-05, "loss": 3.2516, "step": 164800 }, { "epoch": 1.4557833942669423, "grad_norm": 2.1434454917907715, "learning_rate": 8.067106166717504e-05, "loss": 2.7675, "step": 164850 }, { "epoch": 1.4562249421572264, "grad_norm": 1.979413390159607, "learning_rate": 8.066008589303127e-05, "loss": 3.609, "step": 164900 }, { "epoch": 
1.4566664900475106, "grad_norm": 4.171022891998291, "learning_rate": 8.064910775065584e-05, "loss": 3.2429, "step": 164950 }, { "epoch": 1.4571080379377948, "grad_norm": 11.221723556518555, "learning_rate": 8.063812724089674e-05, "loss": 2.7889, "step": 165000 }, { "epoch": 1.4571080379377948, "eval_asr_loss": 0.9156917675408397, "eval_loss": 2.9276485443115234, "eval_runtime": 20.3059, "eval_samples_per_second": 37.821, "eval_steps_per_second": 9.455, "eval_tts_loss": 5.998138508365599, "step": 165000 }, { "epoch": 1.457549585828079, "grad_norm": 5.32289981842041, "learning_rate": 8.062714436460213e-05, "loss": 3.4838, "step": 165050 }, { "epoch": 1.457991133718363, "grad_norm": 3.618535280227661, "learning_rate": 8.06161591226203e-05, "loss": 2.8096, "step": 165100 }, { "epoch": 1.4584326816086473, "grad_norm": 1.2320194244384766, "learning_rate": 8.060517151579983e-05, "loss": 3.3336, "step": 165150 }, { "epoch": 1.4588742294989314, "grad_norm": 1.9119493961334229, "learning_rate": 8.059418154498936e-05, "loss": 3.0939, "step": 165200 }, { "epoch": 1.4593157773892156, "grad_norm": 2.608098268508911, "learning_rate": 8.05831892110378e-05, "loss": 2.8796, "step": 165250 }, { "epoch": 1.4597573252794998, "grad_norm": 0.9068975448608398, "learning_rate": 8.05721945147942e-05, "loss": 3.6585, "step": 165300 }, { "epoch": 1.460198873169784, "grad_norm": 2.3178799152374268, "learning_rate": 8.056119745710784e-05, "loss": 3.5646, "step": 165350 }, { "epoch": 1.4606404210600683, "grad_norm": 1.2207263708114624, "learning_rate": 8.055019803882813e-05, "loss": 2.7127, "step": 165400 }, { "epoch": 1.4610819689503525, "grad_norm": 2.106874465942383, "learning_rate": 8.053919626080466e-05, "loss": 2.8729, "step": 165450 }, { "epoch": 1.4615235168406366, "grad_norm": 1.8500902652740479, "learning_rate": 8.052819212388725e-05, "loss": 3.3694, "step": 165500 }, { "epoch": 1.4619650647309208, "grad_norm": 3.1178908348083496, "learning_rate": 8.051718562892587e-05, "loss": 3.0963, 
"step": 165550 }, { "epoch": 1.462406612621205, "grad_norm": 3.855515718460083, "learning_rate": 8.050617677677069e-05, "loss": 3.8821, "step": 165600 }, { "epoch": 1.4628481605114891, "grad_norm": 1.1022988557815552, "learning_rate": 8.049516556827203e-05, "loss": 3.3908, "step": 165650 }, { "epoch": 1.4632897084017733, "grad_norm": 8.135658264160156, "learning_rate": 8.048415200428042e-05, "loss": 3.2926, "step": 165700 }, { "epoch": 1.4637312562920575, "grad_norm": 2.4983675479888916, "learning_rate": 8.047313608564658e-05, "loss": 3.1689, "step": 165750 }, { "epoch": 1.4641728041823416, "grad_norm": 3.4673867225646973, "learning_rate": 8.046211781322135e-05, "loss": 3.1294, "step": 165800 }, { "epoch": 1.4646143520726258, "grad_norm": 1.9203253984451294, "learning_rate": 8.045109718785586e-05, "loss": 3.5738, "step": 165850 }, { "epoch": 1.46505589996291, "grad_norm": 2.1311495304107666, "learning_rate": 8.044007421040132e-05, "loss": 3.3138, "step": 165900 }, { "epoch": 1.4654974478531941, "grad_norm": 3.7336440086364746, "learning_rate": 8.042904888170917e-05, "loss": 3.198, "step": 165950 }, { "epoch": 1.4659389957434783, "grad_norm": 3.2186074256896973, "learning_rate": 8.041802120263102e-05, "loss": 3.0895, "step": 166000 }, { "epoch": 1.4663805436337625, "grad_norm": 2.4467952251434326, "learning_rate": 8.040699117401868e-05, "loss": 3.1398, "step": 166050 }, { "epoch": 1.4668220915240466, "grad_norm": 1.0022852420806885, "learning_rate": 8.039595879672411e-05, "loss": 3.3921, "step": 166100 }, { "epoch": 1.4672636394143308, "grad_norm": 2.3139004707336426, "learning_rate": 8.038492407159947e-05, "loss": 3.372, "step": 166150 }, { "epoch": 1.467705187304615, "grad_norm": 3.8514606952667236, "learning_rate": 8.037388699949711e-05, "loss": 3.589, "step": 166200 }, { "epoch": 1.4681467351948991, "grad_norm": 2.21282958984375, "learning_rate": 8.036284758126953e-05, "loss": 3.3278, "step": 166250 }, { "epoch": 1.4685882830851833, "grad_norm": 
0.6544881463050842, "learning_rate": 8.035180581776945e-05, "loss": 2.9854, "step": 166300 }, { "epoch": 1.4690298309754675, "grad_norm": 1.3662303686141968, "learning_rate": 8.034076170984974e-05, "loss": 2.9016, "step": 166350 }, { "epoch": 1.4694713788657519, "grad_norm": 2.929163932800293, "learning_rate": 8.032971525836348e-05, "loss": 4.1682, "step": 166400 }, { "epoch": 1.469912926756036, "grad_norm": 2.474565029144287, "learning_rate": 8.031866646416391e-05, "loss": 2.9599, "step": 166450 }, { "epoch": 1.4703544746463202, "grad_norm": 3.5583083629608154, "learning_rate": 8.030761532810444e-05, "loss": 3.309, "step": 166500 }, { "epoch": 1.4707960225366044, "grad_norm": 0.5456688404083252, "learning_rate": 8.029656185103868e-05, "loss": 3.472, "step": 166550 }, { "epoch": 1.4712375704268885, "grad_norm": 1.5131019353866577, "learning_rate": 8.028550603382045e-05, "loss": 3.2333, "step": 166600 }, { "epoch": 1.4716791183171727, "grad_norm": 2.514101028442383, "learning_rate": 8.027444787730367e-05, "loss": 2.9003, "step": 166650 }, { "epoch": 1.4721206662074569, "grad_norm": 2.0266590118408203, "learning_rate": 8.026338738234251e-05, "loss": 2.8004, "step": 166700 }, { "epoch": 1.472562214097741, "grad_norm": 2.531409740447998, "learning_rate": 8.025232454979133e-05, "loss": 3.714, "step": 166750 }, { "epoch": 1.4730037619880252, "grad_norm": 2.3518412113189697, "learning_rate": 8.024125938050458e-05, "loss": 3.3732, "step": 166800 }, { "epoch": 1.4734453098783094, "grad_norm": 5.866429328918457, "learning_rate": 8.023019187533699e-05, "loss": 3.3941, "step": 166850 }, { "epoch": 1.4738868577685935, "grad_norm": 1.075089454650879, "learning_rate": 8.021912203514341e-05, "loss": 2.9005, "step": 166900 }, { "epoch": 1.4743284056588777, "grad_norm": 1.00244140625, "learning_rate": 8.020804986077892e-05, "loss": 3.8404, "step": 166950 }, { "epoch": 1.474769953549162, "grad_norm": 3.055591106414795, "learning_rate": 8.019697535309873e-05, "loss": 3.1253, "step": 
167000 }, { "epoch": 1.4752115014394462, "grad_norm": 3.31400203704834, "learning_rate": 8.018589851295826e-05, "loss": 3.6073, "step": 167050 }, { "epoch": 1.4756530493297304, "grad_norm": 2.1024818420410156, "learning_rate": 8.01748193412131e-05, "loss": 3.0527, "step": 167100 }, { "epoch": 1.4760945972200146, "grad_norm": 2.65166974067688, "learning_rate": 8.016373783871902e-05, "loss": 2.9383, "step": 167150 }, { "epoch": 1.4765361451102987, "grad_norm": 2.0548911094665527, "learning_rate": 8.015265400633197e-05, "loss": 2.886, "step": 167200 }, { "epoch": 1.476977693000583, "grad_norm": 2.2426414489746094, "learning_rate": 8.014156784490809e-05, "loss": 3.4212, "step": 167250 }, { "epoch": 1.477419240890867, "grad_norm": 1.2684077024459839, "learning_rate": 8.013047935530368e-05, "loss": 2.9928, "step": 167300 }, { "epoch": 1.4778607887811512, "grad_norm": 3.0890889167785645, "learning_rate": 8.011938853837524e-05, "loss": 3.1451, "step": 167350 }, { "epoch": 1.4783023366714354, "grad_norm": 6.254854202270508, "learning_rate": 8.010829539497943e-05, "loss": 2.9005, "step": 167400 }, { "epoch": 1.4787438845617196, "grad_norm": 2.656203508377075, "learning_rate": 8.009719992597313e-05, "loss": 3.1927, "step": 167450 }, { "epoch": 1.4791854324520037, "grad_norm": 2.7394678592681885, "learning_rate": 8.008610213221335e-05, "loss": 3.6834, "step": 167500 }, { "epoch": 1.479626980342288, "grad_norm": 4.921375274658203, "learning_rate": 8.007522403967907e-05, "loss": 3.184, "step": 167550 }, { "epoch": 1.480068528232572, "grad_norm": 5.552090167999268, "learning_rate": 8.00641216454365e-05, "loss": 3.1165, "step": 167600 }, { "epoch": 1.4805100761228562, "grad_norm": 1.2124018669128418, "learning_rate": 8.005301692899549e-05, "loss": 3.0754, "step": 167650 }, { "epoch": 1.4809516240131404, "grad_norm": 1.2512977123260498, "learning_rate": 8.004190989121377e-05, "loss": 2.6987, "step": 167700 }, { "epoch": 1.4813931719034246, "grad_norm": 4.063098907470703, 
"learning_rate": 8.003080053294925e-05, "loss": 3.5446, "step": 167750 }, { "epoch": 1.4818347197937087, "grad_norm": 3.4058032035827637, "learning_rate": 8.001968885506004e-05, "loss": 3.1717, "step": 167800 }, { "epoch": 1.482276267683993, "grad_norm": 1.3887379169464111, "learning_rate": 8.000857485840442e-05, "loss": 3.2566, "step": 167850 }, { "epoch": 1.482717815574277, "grad_norm": 3.54004168510437, "learning_rate": 7.999745854384086e-05, "loss": 2.7738, "step": 167900 }, { "epoch": 1.4831593634645612, "grad_norm": 0.8720212578773499, "learning_rate": 7.9986339912228e-05, "loss": 2.6591, "step": 167950 }, { "epoch": 1.4836009113548456, "grad_norm": 2.6884117126464844, "learning_rate": 7.997521896442467e-05, "loss": 2.9426, "step": 168000 }, { "epoch": 1.4836009113548456, "eval_asr_loss": 0.9104613695318818, "eval_loss": 2.9150123596191406, "eval_runtime": 20.6114, "eval_samples_per_second": 37.261, "eval_steps_per_second": 9.315, "eval_tts_loss": 5.9395940312615085, "step": 168000 }, { "epoch": 1.4840424592451298, "grad_norm": 1.3045971393585205, "learning_rate": 7.996409570128984e-05, "loss": 3.0543, "step": 168050 }, { "epoch": 1.484484007135414, "grad_norm": 1.5941531658172607, "learning_rate": 7.995297012368273e-05, "loss": 2.7356, "step": 168100 }, { "epoch": 1.4849255550256981, "grad_norm": 6.986129283905029, "learning_rate": 7.994184223246263e-05, "loss": 3.7545, "step": 168150 }, { "epoch": 1.4853671029159823, "grad_norm": 1.7898863554000854, "learning_rate": 7.993071202848916e-05, "loss": 3.2304, "step": 168200 }, { "epoch": 1.4858086508062665, "grad_norm": 1.5254120826721191, "learning_rate": 7.991957951262196e-05, "loss": 2.9966, "step": 168250 }, { "epoch": 1.4862501986965506, "grad_norm": 5.384498596191406, "learning_rate": 7.990844468572098e-05, "loss": 3.0987, "step": 168300 }, { "epoch": 1.4866917465868348, "grad_norm": 2.3323962688446045, "learning_rate": 7.989730754864624e-05, "loss": 2.9712, "step": 168350 }, { "epoch": 1.487133294477119, 
"grad_norm": 3.0033323764801025, "learning_rate": 7.988616810225804e-05, "loss": 3.1348, "step": 168400 }, { "epoch": 1.4875748423674031, "grad_norm": 3.1256043910980225, "learning_rate": 7.987502634741676e-05, "loss": 2.8878, "step": 168450 }, { "epoch": 1.4880163902576873, "grad_norm": 1.6998767852783203, "learning_rate": 7.986388228498303e-05, "loss": 3.1979, "step": 168500 }, { "epoch": 1.4884579381479714, "grad_norm": 1.2934191226959229, "learning_rate": 7.985273591581763e-05, "loss": 3.2321, "step": 168550 }, { "epoch": 1.4888994860382558, "grad_norm": 1.3229929208755493, "learning_rate": 7.984158724078154e-05, "loss": 3.4828, "step": 168600 }, { "epoch": 1.48934103392854, "grad_norm": 1.3975541591644287, "learning_rate": 7.983043626073587e-05, "loss": 3.2538, "step": 168650 }, { "epoch": 1.4897825818188242, "grad_norm": 0.7398980259895325, "learning_rate": 7.981928297654197e-05, "loss": 3.1933, "step": 168700 }, { "epoch": 1.4902241297091083, "grad_norm": 2.5116653442382812, "learning_rate": 7.980812738906129e-05, "loss": 3.2666, "step": 168750 }, { "epoch": 1.4906656775993925, "grad_norm": 4.653420925140381, "learning_rate": 7.979696949915556e-05, "loss": 2.9902, "step": 168800 }, { "epoch": 1.4911072254896767, "grad_norm": 2.0215892791748047, "learning_rate": 7.97858093076866e-05, "loss": 3.4902, "step": 168850 }, { "epoch": 1.4915487733799608, "grad_norm": 2.4749724864959717, "learning_rate": 7.977464681551646e-05, "loss": 3.2653, "step": 168900 }, { "epoch": 1.491990321270245, "grad_norm": 0.8826634883880615, "learning_rate": 7.97634820235073e-05, "loss": 2.8773, "step": 168950 }, { "epoch": 1.4924318691605292, "grad_norm": 2.755411386489868, "learning_rate": 7.975231493252157e-05, "loss": 3.1709, "step": 169000 }, { "epoch": 1.4928734170508133, "grad_norm": 1.458707332611084, "learning_rate": 7.97411455434218e-05, "loss": 3.195, "step": 169050 }, { "epoch": 1.4933149649410975, "grad_norm": 2.2169852256774902, "learning_rate": 7.972997385707073e-05, 
"loss": 3.069, "step": 169100 }, { "epoch": 1.4937565128313817, "grad_norm": 4.073314189910889, "learning_rate": 7.971879987433128e-05, "loss": 3.0165, "step": 169150 }, { "epoch": 1.4941980607216658, "grad_norm": 1.273754596710205, "learning_rate": 7.970762359606656e-05, "loss": 3.1862, "step": 169200 }, { "epoch": 1.49463960861195, "grad_norm": 2.835773468017578, "learning_rate": 7.96964450231398e-05, "loss": 3.2468, "step": 169250 }, { "epoch": 1.4950811565022342, "grad_norm": 3.9089925289154053, "learning_rate": 7.968526415641452e-05, "loss": 2.8939, "step": 169300 }, { "epoch": 1.4955227043925183, "grad_norm": 3.043555498123169, "learning_rate": 7.967408099675429e-05, "loss": 3.6233, "step": 169350 }, { "epoch": 1.4959642522828025, "grad_norm": 1.930454969406128, "learning_rate": 7.966289554502293e-05, "loss": 2.4929, "step": 169400 }, { "epoch": 1.4964058001730867, "grad_norm": 5.453582286834717, "learning_rate": 7.965170780208441e-05, "loss": 3.0559, "step": 169450 }, { "epoch": 1.4968473480633708, "grad_norm": 1.6898558139801025, "learning_rate": 7.964051776880292e-05, "loss": 3.0782, "step": 169500 }, { "epoch": 1.497288895953655, "grad_norm": 1.4121713638305664, "learning_rate": 7.962932544604277e-05, "loss": 3.6197, "step": 169550 }, { "epoch": 1.4977304438439394, "grad_norm": 6.987101078033447, "learning_rate": 7.96181308346685e-05, "loss": 3.3473, "step": 169600 }, { "epoch": 1.4981719917342236, "grad_norm": 3.6944234371185303, "learning_rate": 7.960693393554474e-05, "loss": 3.2968, "step": 169650 }, { "epoch": 1.4986135396245077, "grad_norm": 1.7384791374206543, "learning_rate": 7.959573474953643e-05, "loss": 3.2508, "step": 169700 }, { "epoch": 1.4990550875147919, "grad_norm": 3.552605152130127, "learning_rate": 7.958453327750854e-05, "loss": 3.7686, "step": 169750 }, { "epoch": 1.499496635405076, "grad_norm": 1.9855374097824097, "learning_rate": 7.957332952032634e-05, "loss": 3.5705, "step": 169800 }, { "epoch": 1.4999381832953602, "grad_norm": 
2.5507972240448, "learning_rate": 7.956212347885521e-05, "loss": 3.1468, "step": 169850 }, { "epoch": 1.5003797311856444, "grad_norm": 2.040591239929199, "learning_rate": 7.955091515396073e-05, "loss": 3.2078, "step": 169900 }, { "epoch": 1.5008212790759285, "grad_norm": 2.0551323890686035, "learning_rate": 7.953970454650863e-05, "loss": 3.1355, "step": 169950 }, { "epoch": 1.5012628269662127, "grad_norm": 1.476214051246643, "learning_rate": 7.952849165736483e-05, "loss": 3.1195, "step": 170000 }, { "epoch": 1.5017043748564969, "grad_norm": 4.278891086578369, "learning_rate": 7.951727648739547e-05, "loss": 2.9388, "step": 170050 }, { "epoch": 1.5021459227467813, "grad_norm": 3.7804348468780518, "learning_rate": 7.950605903746679e-05, "loss": 3.2333, "step": 170100 }, { "epoch": 1.5025874706370654, "grad_norm": 2.9571738243103027, "learning_rate": 7.949483930844524e-05, "loss": 3.5166, "step": 170150 }, { "epoch": 1.5030290185273496, "grad_norm": 1.926645040512085, "learning_rate": 7.948361730119747e-05, "loss": 2.9482, "step": 170200 }, { "epoch": 1.5034705664176338, "grad_norm": 2.891768217086792, "learning_rate": 7.947261752459494e-05, "loss": 3.2691, "step": 170250 }, { "epoch": 1.503912114307918, "grad_norm": 2.981902599334717, "learning_rate": 7.946139100901666e-05, "loss": 3.146, "step": 170300 }, { "epoch": 1.504353662198202, "grad_norm": 5.165239334106445, "learning_rate": 7.945016221779575e-05, "loss": 3.2342, "step": 170350 }, { "epoch": 1.5047952100884863, "grad_norm": 2.168894052505493, "learning_rate": 7.943893115179954e-05, "loss": 3.3417, "step": 170400 }, { "epoch": 1.5052367579787704, "grad_norm": 1.8027621507644653, "learning_rate": 7.942769781189552e-05, "loss": 3.2269, "step": 170450 }, { "epoch": 1.5056783058690546, "grad_norm": 2.0628678798675537, "learning_rate": 7.941646219895141e-05, "loss": 3.0221, "step": 170500 }, { "epoch": 1.5061198537593388, "grad_norm": 2.1540791988372803, "learning_rate": 7.940522431383502e-05, "loss": 3.2091, 
"step": 170550 }, { "epoch": 1.506561401649623, "grad_norm": 2.1455180644989014, "learning_rate": 7.939398415741441e-05, "loss": 3.0678, "step": 170600 }, { "epoch": 1.507002949539907, "grad_norm": 1.412413477897644, "learning_rate": 7.938274173055777e-05, "loss": 3.4302, "step": 170650 }, { "epoch": 1.5074444974301913, "grad_norm": 1.9970897436141968, "learning_rate": 7.937149703413349e-05, "loss": 3.1186, "step": 170700 }, { "epoch": 1.5078860453204754, "grad_norm": 4.280227184295654, "learning_rate": 7.936025006901014e-05, "loss": 3.2719, "step": 170750 }, { "epoch": 1.5083275932107596, "grad_norm": 3.5186574459075928, "learning_rate": 7.93490008360564e-05, "loss": 3.2983, "step": 170800 }, { "epoch": 1.5087691411010438, "grad_norm": 1.2954347133636475, "learning_rate": 7.933774933614126e-05, "loss": 3.1792, "step": 170850 }, { "epoch": 1.509210688991328, "grad_norm": 2.387403726577759, "learning_rate": 7.932672066765598e-05, "loss": 2.9762, "step": 170900 }, { "epoch": 1.509652236881612, "grad_norm": 5.01585578918457, "learning_rate": 7.93154646817213e-05, "loss": 3.2197, "step": 170950 }, { "epoch": 1.5100937847718963, "grad_norm": 3.7455615997314453, "learning_rate": 7.930420643141558e-05, "loss": 3.4165, "step": 171000 }, { "epoch": 1.5100937847718963, "eval_asr_loss": 0.9273673528471224, "eval_loss": 2.9135005474090576, "eval_runtime": 20.4893, "eval_samples_per_second": 37.483, "eval_steps_per_second": 9.371, "eval_tts_loss": 5.9611810830116205, "step": 171000 }, { "epoch": 1.5105353326621804, "grad_norm": 1.949747920036316, "learning_rate": 7.92929459176084e-05, "loss": 3.3078, "step": 171050 }, { "epoch": 1.5109768805524646, "grad_norm": 3.1064343452453613, "learning_rate": 7.928168314116953e-05, "loss": 3.5604, "step": 171100 }, { "epoch": 1.5114184284427488, "grad_norm": 1.3786486387252808, "learning_rate": 7.927041810296896e-05, "loss": 3.3521, "step": 171150 }, { "epoch": 1.511859976333033, "grad_norm": 3.0906989574432373, "learning_rate": 
7.925915080387679e-05, "loss": 3.719, "step": 171200 }, { "epoch": 1.512301524223317, "grad_norm": 1.4307838678359985, "learning_rate": 7.924788124476331e-05, "loss": 3.4073, "step": 171250 }, { "epoch": 1.5127430721136015, "grad_norm": 1.856938362121582, "learning_rate": 7.923660942649904e-05, "loss": 3.062, "step": 171300 }, { "epoch": 1.5131846200038856, "grad_norm": 2.1577956676483154, "learning_rate": 7.92253353499546e-05, "loss": 3.0501, "step": 171350 }, { "epoch": 1.5136261678941698, "grad_norm": 1.228046178817749, "learning_rate": 7.921405901600084e-05, "loss": 3.1004, "step": 171400 }, { "epoch": 1.514067715784454, "grad_norm": 2.502464771270752, "learning_rate": 7.920278042550875e-05, "loss": 2.6685, "step": 171450 }, { "epoch": 1.5145092636747381, "grad_norm": 3.5946199893951416, "learning_rate": 7.919149957934948e-05, "loss": 3.0592, "step": 171500 }, { "epoch": 1.5149508115650223, "grad_norm": 2.893009901046753, "learning_rate": 7.918021647839443e-05, "loss": 3.1573, "step": 171550 }, { "epoch": 1.5153923594553065, "grad_norm": 1.8947813510894775, "learning_rate": 7.91689311235151e-05, "loss": 3.3152, "step": 171600 }, { "epoch": 1.5158339073455906, "grad_norm": 3.0196502208709717, "learning_rate": 7.915764351558317e-05, "loss": 3.2163, "step": 171650 }, { "epoch": 1.516275455235875, "grad_norm": 3.1389248371124268, "learning_rate": 7.914635365547053e-05, "loss": 3.2143, "step": 171700 }, { "epoch": 1.5167170031261592, "grad_norm": 3.7226805686950684, "learning_rate": 7.913506154404924e-05, "loss": 3.1146, "step": 171750 }, { "epoch": 1.5171585510164434, "grad_norm": 1.4495065212249756, "learning_rate": 7.912376718219148e-05, "loss": 3.4301, "step": 171800 }, { "epoch": 1.5176000989067275, "grad_norm": 4.089288711547852, "learning_rate": 7.911247057076969e-05, "loss": 2.8631, "step": 171850 }, { "epoch": 1.5180416467970117, "grad_norm": 1.8996052742004395, "learning_rate": 7.91011717106564e-05, "loss": 3.3218, "step": 171900 }, { "epoch": 
1.5184831946872959, "grad_norm": 4.082929611206055, "learning_rate": 7.908987060272437e-05, "loss": 3.0405, "step": 171950 }, { "epoch": 1.51892474257758, "grad_norm": 7.022767543792725, "learning_rate": 7.90785672478465e-05, "loss": 3.5964, "step": 172000 }, { "epoch": 1.5193662904678642, "grad_norm": 6.730228900909424, "learning_rate": 7.90672616468959e-05, "loss": 3.3314, "step": 172050 }, { "epoch": 1.5198078383581484, "grad_norm": 2.4673285484313965, "learning_rate": 7.905595380074582e-05, "loss": 3.2224, "step": 172100 }, { "epoch": 1.5202493862484325, "grad_norm": 2.011349678039551, "learning_rate": 7.904464371026971e-05, "loss": 3.4183, "step": 172150 }, { "epoch": 1.5206909341387167, "grad_norm": 1.7207785844802856, "learning_rate": 7.903333137634115e-05, "loss": 3.1136, "step": 172200 }, { "epoch": 1.5211324820290009, "grad_norm": 4.2923150062561035, "learning_rate": 7.902201679983393e-05, "loss": 3.1425, "step": 172250 }, { "epoch": 1.521574029919285, "grad_norm": 3.7688066959381104, "learning_rate": 7.901069998162203e-05, "loss": 3.4813, "step": 172300 }, { "epoch": 1.5220155778095692, "grad_norm": 1.0002552270889282, "learning_rate": 7.899938092257955e-05, "loss": 3.5722, "step": 172350 }, { "epoch": 1.5224571256998534, "grad_norm": 1.6142280101776123, "learning_rate": 7.89880596235808e-05, "loss": 2.8011, "step": 172400 }, { "epoch": 1.5228986735901375, "grad_norm": 5.061013698577881, "learning_rate": 7.897673608550026e-05, "loss": 2.8856, "step": 172450 }, { "epoch": 1.5233402214804217, "grad_norm": 4.347459316253662, "learning_rate": 7.896541030921256e-05, "loss": 3.1504, "step": 172500 }, { "epoch": 1.5237817693707059, "grad_norm": 1.6337541341781616, "learning_rate": 7.895408229559256e-05, "loss": 3.1389, "step": 172550 }, { "epoch": 1.52422331726099, "grad_norm": 3.7045750617980957, "learning_rate": 7.89427520455152e-05, "loss": 3.4723, "step": 172600 }, { "epoch": 1.5246648651512742, "grad_norm": 2.3034849166870117, "learning_rate": 
7.893141955985568e-05, "loss": 3.1481, "step": 172650 }, { "epoch": 1.5251064130415584, "grad_norm": 1.3366230726242065, "learning_rate": 7.892008483948933e-05, "loss": 2.9249, "step": 172700 }, { "epoch": 1.5255479609318425, "grad_norm": 1.471297025680542, "learning_rate": 7.890874788529166e-05, "loss": 3.4342, "step": 172750 }, { "epoch": 1.5259895088221267, "grad_norm": 4.741721153259277, "learning_rate": 7.889740869813835e-05, "loss": 3.3231, "step": 172800 }, { "epoch": 1.526431056712411, "grad_norm": 1.3582066297531128, "learning_rate": 7.888606727890528e-05, "loss": 3.2317, "step": 172850 }, { "epoch": 1.5268726046026952, "grad_norm": 2.5067501068115234, "learning_rate": 7.887472362846844e-05, "loss": 3.1801, "step": 172900 }, { "epoch": 1.5273141524929794, "grad_norm": 2.211074113845825, "learning_rate": 7.886337774770407e-05, "loss": 3.4487, "step": 172950 }, { "epoch": 1.5277557003832636, "grad_norm": 2.118274450302124, "learning_rate": 7.88520296374885e-05, "loss": 3.4356, "step": 173000 }, { "epoch": 1.5281972482735477, "grad_norm": 2.6437795162200928, "learning_rate": 7.884067929869832e-05, "loss": 3.2181, "step": 173050 }, { "epoch": 1.528638796163832, "grad_norm": 7.714078426361084, "learning_rate": 7.882932673221022e-05, "loss": 3.0523, "step": 173100 }, { "epoch": 1.529080344054116, "grad_norm": 3.1765506267547607, "learning_rate": 7.881797193890108e-05, "loss": 2.9752, "step": 173150 }, { "epoch": 1.5295218919444002, "grad_norm": 1.7637883424758911, "learning_rate": 7.880661491964799e-05, "loss": 2.7156, "step": 173200 }, { "epoch": 1.5299634398346846, "grad_norm": 2.4958198070526123, "learning_rate": 7.879525567532818e-05, "loss": 3.2052, "step": 173250 }, { "epoch": 1.5304049877249688, "grad_norm": 1.5706040859222412, "learning_rate": 7.878389420681902e-05, "loss": 2.9912, "step": 173300 }, { "epoch": 1.530846535615253, "grad_norm": 5.228756904602051, "learning_rate": 7.877253051499813e-05, "loss": 3.2192, "step": 173350 }, { "epoch": 
1.5312880835055371, "grad_norm": 4.856530666351318, "learning_rate": 7.876116460074325e-05, "loss": 2.5722, "step": 173400 }, { "epoch": 1.5317296313958213, "grad_norm": 1.8919997215270996, "learning_rate": 7.874979646493228e-05, "loss": 3.1431, "step": 173450 }, { "epoch": 1.5321711792861055, "grad_norm": 3.6233277320861816, "learning_rate": 7.873842610844332e-05, "loss": 3.6041, "step": 173500 }, { "epoch": 1.5326127271763896, "grad_norm": 0.6207433938980103, "learning_rate": 7.872705353215464e-05, "loss": 2.8251, "step": 173550 }, { "epoch": 1.5330542750666738, "grad_norm": 3.8003902435302734, "learning_rate": 7.871567873694468e-05, "loss": 3.3234, "step": 173600 }, { "epoch": 1.533495822956958, "grad_norm": 2.3610243797302246, "learning_rate": 7.870430172369204e-05, "loss": 2.9732, "step": 173650 }, { "epoch": 1.5339373708472421, "grad_norm": 3.0918922424316406, "learning_rate": 7.869292249327549e-05, "loss": 3.3379, "step": 173700 }, { "epoch": 1.5343789187375263, "grad_norm": 3.1839041709899902, "learning_rate": 7.868154104657399e-05, "loss": 3.1345, "step": 173750 }, { "epoch": 1.5348204666278105, "grad_norm": 2.7104201316833496, "learning_rate": 7.867015738446666e-05, "loss": 3.0175, "step": 173800 }, { "epoch": 1.5352620145180946, "grad_norm": 6.00855016708374, "learning_rate": 7.865877150783279e-05, "loss": 3.308, "step": 173850 }, { "epoch": 1.5357035624083788, "grad_norm": 1.8182517290115356, "learning_rate": 7.864738341755183e-05, "loss": 3.6535, "step": 173900 }, { "epoch": 1.536145110298663, "grad_norm": 1.9349044561386108, "learning_rate": 7.863599311450343e-05, "loss": 3.2833, "step": 173950 }, { "epoch": 1.5365866581889471, "grad_norm": 3.1747500896453857, "learning_rate": 7.862460059956738e-05, "loss": 3.1619, "step": 174000 }, { "epoch": 1.5365866581889471, "eval_asr_loss": 0.9183246925579431, "eval_loss": 2.908951997756958, "eval_runtime": 29.1305, "eval_samples_per_second": 26.364, "eval_steps_per_second": 6.591, "eval_tts_loss": 
5.985057644157511, "step": 174000 }, { "epoch": 1.5370282060792313, "grad_norm": 3.5058462619781494, "learning_rate": 7.861320587362366e-05, "loss": 2.9589, "step": 174050 }, { "epoch": 1.5374697539695155, "grad_norm": 1.7006515264511108, "learning_rate": 7.860180893755243e-05, "loss": 3.1892, "step": 174100 }, { "epoch": 1.5379113018597996, "grad_norm": 6.1886444091796875, "learning_rate": 7.8590409792234e-05, "loss": 2.8409, "step": 174150 }, { "epoch": 1.5383528497500838, "grad_norm": 5.43979549407959, "learning_rate": 7.857900843854884e-05, "loss": 3.1715, "step": 174200 }, { "epoch": 1.538794397640368, "grad_norm": 1.8094000816345215, "learning_rate": 7.856760487737763e-05, "loss": 3.4528, "step": 174250 }, { "epoch": 1.5392359455306521, "grad_norm": 3.401740312576294, "learning_rate": 7.855619910960119e-05, "loss": 3.4312, "step": 174300 }, { "epoch": 1.5396774934209363, "grad_norm": 3.1682894229888916, "learning_rate": 7.854479113610052e-05, "loss": 3.1221, "step": 174350 }, { "epoch": 1.5401190413112205, "grad_norm": 5.100546360015869, "learning_rate": 7.853338095775677e-05, "loss": 3.1851, "step": 174400 }, { "epoch": 1.5405605892015048, "grad_norm": 0.7844409346580505, "learning_rate": 7.85219685754513e-05, "loss": 3.2288, "step": 174450 }, { "epoch": 1.541002137091789, "grad_norm": 2.428945302963257, "learning_rate": 7.851055399006565e-05, "loss": 2.9583, "step": 174500 }, { "epoch": 1.5414436849820732, "grad_norm": 3.063448905944824, "learning_rate": 7.849913720248143e-05, "loss": 2.7642, "step": 174550 }, { "epoch": 1.5418852328723573, "grad_norm": 3.730762243270874, "learning_rate": 7.848771821358055e-05, "loss": 3.4903, "step": 174600 }, { "epoch": 1.5423267807626415, "grad_norm": 1.0115082263946533, "learning_rate": 7.847629702424499e-05, "loss": 3.4874, "step": 174650 }, { "epoch": 1.5427683286529257, "grad_norm": 4.5162835121154785, "learning_rate": 7.846487363535697e-05, "loss": 3.3289, "step": 174700 }, { "epoch": 1.5432098765432098, 
"grad_norm": 2.281966209411621, "learning_rate": 7.845344804779884e-05, "loss": 3.6502, "step": 174750 }, { "epoch": 1.543651424433494, "grad_norm": 3.5982887744903564, "learning_rate": 7.844202026245311e-05, "loss": 3.0063, "step": 174800 }, { "epoch": 1.5440929723237784, "grad_norm": 4.303706645965576, "learning_rate": 7.84305902802025e-05, "loss": 3.3499, "step": 174850 }, { "epoch": 1.5445345202140626, "grad_norm": 0.5809865593910217, "learning_rate": 7.841915810192987e-05, "loss": 3.3385, "step": 174900 }, { "epoch": 1.5449760681043467, "grad_norm": 1.4233925342559814, "learning_rate": 7.840772372851827e-05, "loss": 3.3227, "step": 174950 }, { "epoch": 1.545417615994631, "grad_norm": 1.0716662406921387, "learning_rate": 7.839628716085092e-05, "loss": 2.7928, "step": 175000 }, { "epoch": 1.545859163884915, "grad_norm": 1.625321388244629, "learning_rate": 7.838484839981117e-05, "loss": 2.9877, "step": 175050 }, { "epoch": 1.5463007117751992, "grad_norm": 1.8648103475570679, "learning_rate": 7.837340744628257e-05, "loss": 3.0413, "step": 175100 }, { "epoch": 1.5467422596654834, "grad_norm": 4.233791828155518, "learning_rate": 7.836196430114884e-05, "loss": 3.1447, "step": 175150 }, { "epoch": 1.5471838075557676, "grad_norm": 2.086745023727417, "learning_rate": 7.835051896529388e-05, "loss": 3.4688, "step": 175200 }, { "epoch": 1.5476253554460517, "grad_norm": 1.6742719411849976, "learning_rate": 7.833930041157026e-05, "loss": 3.1915, "step": 175250 }, { "epoch": 1.548066903336336, "grad_norm": 2.0787861347198486, "learning_rate": 7.832785074069555e-05, "loss": 3.1022, "step": 175300 }, { "epoch": 1.54850845122662, "grad_norm": 3.8537216186523438, "learning_rate": 7.831639888173457e-05, "loss": 2.9691, "step": 175350 }, { "epoch": 1.5489499991169042, "grad_norm": 1.3162223100662231, "learning_rate": 7.830494483557191e-05, "loss": 3.1025, "step": 175400 }, { "epoch": 1.5493915470071884, "grad_norm": 3.5656752586364746, "learning_rate": 7.829348860309228e-05, 
"loss": 2.9325, "step": 175450 }, { "epoch": 1.5498330948974726, "grad_norm": 1.3675990104675293, "learning_rate": 7.828203018518056e-05, "loss": 2.8666, "step": 175500 }, { "epoch": 1.5502746427877567, "grad_norm": 7.35089111328125, "learning_rate": 7.827056958272183e-05, "loss": 3.3025, "step": 175550 }, { "epoch": 1.550716190678041, "grad_norm": 1.7935676574707031, "learning_rate": 7.825910679660134e-05, "loss": 3.3327, "step": 175600 }, { "epoch": 1.551157738568325, "grad_norm": 3.305955171585083, "learning_rate": 7.824764182770448e-05, "loss": 3.1936, "step": 175650 }, { "epoch": 1.5515992864586092, "grad_norm": 2.892768621444702, "learning_rate": 7.823617467691681e-05, "loss": 3.1448, "step": 175700 }, { "epoch": 1.5520408343488934, "grad_norm": 2.3808202743530273, "learning_rate": 7.82247053451241e-05, "loss": 3.2515, "step": 175750 }, { "epoch": 1.5524823822391776, "grad_norm": 3.7346816062927246, "learning_rate": 7.821323383321224e-05, "loss": 3.2934, "step": 175800 }, { "epoch": 1.5529239301294617, "grad_norm": 1.123934030532837, "learning_rate": 7.82017601420673e-05, "loss": 3.5983, "step": 175850 }, { "epoch": 1.553365478019746, "grad_norm": 2.3236446380615234, "learning_rate": 7.819028427257553e-05, "loss": 3.1701, "step": 175900 }, { "epoch": 1.55380702591003, "grad_norm": 1.2918416261672974, "learning_rate": 7.817880622562334e-05, "loss": 3.1689, "step": 175950 }, { "epoch": 1.5542485738003142, "grad_norm": 3.1949520111083984, "learning_rate": 7.816732600209735e-05, "loss": 3.2805, "step": 176000 }, { "epoch": 1.5546901216905986, "grad_norm": 1.407420039176941, "learning_rate": 7.815584360288424e-05, "loss": 3.6234, "step": 176050 }, { "epoch": 1.5551316695808828, "grad_norm": 2.7942097187042236, "learning_rate": 7.814435902887098e-05, "loss": 2.9961, "step": 176100 }, { "epoch": 1.555573217471167, "grad_norm": 5.137963771820068, "learning_rate": 7.813287228094466e-05, "loss": 2.9095, "step": 176150 }, { "epoch": 1.5560147653614511, "grad_norm": 
1.7814692258834839, "learning_rate": 7.81213833599925e-05, "loss": 3.0931, "step": 176200 }, { "epoch": 1.5564563132517353, "grad_norm": 0.8939611911773682, "learning_rate": 7.810989226690193e-05, "loss": 3.306, "step": 176250 }, { "epoch": 1.5568978611420194, "grad_norm": 0.7645553946495056, "learning_rate": 7.809839900256056e-05, "loss": 3.0424, "step": 176300 }, { "epoch": 1.5573394090323036, "grad_norm": 2.913379192352295, "learning_rate": 7.808690356785613e-05, "loss": 3.2549, "step": 176350 }, { "epoch": 1.5577809569225878, "grad_norm": 2.6756792068481445, "learning_rate": 7.807540596367658e-05, "loss": 3.6026, "step": 176400 }, { "epoch": 1.5582225048128722, "grad_norm": 1.4105322360992432, "learning_rate": 7.806390619090998e-05, "loss": 3.085, "step": 176450 }, { "epoch": 1.5586640527031563, "grad_norm": 5.497186660766602, "learning_rate": 7.80524042504446e-05, "loss": 2.7705, "step": 176500 }, { "epoch": 1.5591056005934405, "grad_norm": 6.118846416473389, "learning_rate": 7.804090014316887e-05, "loss": 3.3862, "step": 176550 }, { "epoch": 1.5595471484837247, "grad_norm": 1.7508882284164429, "learning_rate": 7.80293938699714e-05, "loss": 2.9305, "step": 176600 }, { "epoch": 1.5599886963740088, "grad_norm": 3.990896701812744, "learning_rate": 7.801788543174092e-05, "loss": 3.2443, "step": 176650 }, { "epoch": 1.560430244264293, "grad_norm": 4.021361351013184, "learning_rate": 7.800637482936637e-05, "loss": 2.9178, "step": 176700 }, { "epoch": 1.5608717921545772, "grad_norm": 1.7801082134246826, "learning_rate": 7.799486206373685e-05, "loss": 3.4446, "step": 176750 }, { "epoch": 1.5613133400448613, "grad_norm": 2.1543657779693604, "learning_rate": 7.798334713574165e-05, "loss": 3.2613, "step": 176800 }, { "epoch": 1.5617548879351455, "grad_norm": 2.6019632816314697, "learning_rate": 7.797183004627015e-05, "loss": 3.6306, "step": 176850 }, { "epoch": 1.5621964358254297, "grad_norm": 5.682262420654297, "learning_rate": 7.796031079621197e-05, "loss": 3.2412, 
"step": 176900 }, { "epoch": 1.5626379837157138, "grad_norm": 2.869284152984619, "learning_rate": 7.794878938645691e-05, "loss": 3.2968, "step": 176950 }, { "epoch": 1.563079531605998, "grad_norm": 2.434187173843384, "learning_rate": 7.793726581789485e-05, "loss": 3.1968, "step": 177000 }, { "epoch": 1.563079531605998, "eval_asr_loss": 0.9133994808530763, "eval_loss": 2.902208089828491, "eval_runtime": 21.1294, "eval_samples_per_second": 36.347, "eval_steps_per_second": 9.087, "eval_tts_loss": 5.981479948869558, "step": 177000 }, { "epoch": 1.5635210794962822, "grad_norm": 3.677980661392212, "learning_rate": 7.792574009141591e-05, "loss": 3.076, "step": 177050 }, { "epoch": 1.5639626273865663, "grad_norm": 1.4063122272491455, "learning_rate": 7.791421220791034e-05, "loss": 2.9536, "step": 177100 }, { "epoch": 1.5644041752768505, "grad_norm": 2.6018192768096924, "learning_rate": 7.79026821682686e-05, "loss": 3.0829, "step": 177150 }, { "epoch": 1.5648457231671347, "grad_norm": 2.5115301609039307, "learning_rate": 7.789114997338125e-05, "loss": 3.1509, "step": 177200 }, { "epoch": 1.5652872710574188, "grad_norm": 1.8205089569091797, "learning_rate": 7.787984633223084e-05, "loss": 2.9936, "step": 177250 }, { "epoch": 1.565728818947703, "grad_norm": 3.0705020427703857, "learning_rate": 7.786830987258534e-05, "loss": 3.2129, "step": 177300 }, { "epoch": 1.5661703668379872, "grad_norm": 1.5330673456192017, "learning_rate": 7.785677126034919e-05, "loss": 2.8801, "step": 177350 }, { "epoch": 1.5666119147282713, "grad_norm": 3.940424680709839, "learning_rate": 7.784523049641369e-05, "loss": 3.1278, "step": 177400 }, { "epoch": 1.5670534626185555, "grad_norm": 2.2547924518585205, "learning_rate": 7.783368758167027e-05, "loss": 2.8893, "step": 177450 }, { "epoch": 1.5674950105088397, "grad_norm": 2.2895209789276123, "learning_rate": 7.78221425170105e-05, "loss": 3.0462, "step": 177500 }, { "epoch": 1.5679365583991238, "grad_norm": 1.0775874853134155, "learning_rate": 
7.781059530332613e-05, "loss": 3.2746, "step": 177550 }, { "epoch": 1.568378106289408, "grad_norm": 3.228985071182251, "learning_rate": 7.77990459415091e-05, "loss": 3.1635, "step": 177600 }, { "epoch": 1.5688196541796924, "grad_norm": 2.3288896083831787, "learning_rate": 7.778749443245153e-05, "loss": 2.9833, "step": 177650 }, { "epoch": 1.5692612020699765, "grad_norm": 1.9562253952026367, "learning_rate": 7.777594077704561e-05, "loss": 2.9966, "step": 177700 }, { "epoch": 1.5697027499602607, "grad_norm": 3.997286558151245, "learning_rate": 7.776438497618382e-05, "loss": 3.0108, "step": 177750 }, { "epoch": 1.5701442978505449, "grad_norm": 4.3550801277160645, "learning_rate": 7.775282703075872e-05, "loss": 3.4799, "step": 177800 }, { "epoch": 1.570585845740829, "grad_norm": 2.5383787155151367, "learning_rate": 7.774126694166307e-05, "loss": 3.0563, "step": 177850 }, { "epoch": 1.5710273936311132, "grad_norm": 2.1461341381073, "learning_rate": 7.772970470978978e-05, "loss": 3.5388, "step": 177900 }, { "epoch": 1.5714689415213974, "grad_norm": 1.3384110927581787, "learning_rate": 7.771814033603196e-05, "loss": 3.2212, "step": 177950 }, { "epoch": 1.5719104894116815, "grad_norm": 2.8407857418060303, "learning_rate": 7.770657382128284e-05, "loss": 3.0422, "step": 178000 }, { "epoch": 1.572352037301966, "grad_norm": 2.219881772994995, "learning_rate": 7.769500516643582e-05, "loss": 3.061, "step": 178050 }, { "epoch": 1.57279358519225, "grad_norm": 2.5825889110565186, "learning_rate": 7.76834343723845e-05, "loss": 3.6231, "step": 178100 }, { "epoch": 1.5732351330825343, "grad_norm": 2.435443162918091, "learning_rate": 7.767186144002264e-05, "loss": 3.1914, "step": 178150 }, { "epoch": 1.5736766809728184, "grad_norm": 3.284209728240967, "learning_rate": 7.766028637024411e-05, "loss": 2.9997, "step": 178200 }, { "epoch": 1.5741182288631026, "grad_norm": 3.507371187210083, "learning_rate": 7.764870916394302e-05, "loss": 3.117, "step": 178250 }, { "epoch": 
1.5745597767533868, "grad_norm": 4.541454315185547, "learning_rate": 7.763712982201362e-05, "loss": 3.203, "step": 178300 }, { "epoch": 1.575001324643671, "grad_norm": 1.8233087062835693, "learning_rate": 7.762554834535028e-05, "loss": 3.2556, "step": 178350 }, { "epoch": 1.575442872533955, "grad_norm": 2.1894471645355225, "learning_rate": 7.76139647348476e-05, "loss": 3.4755, "step": 178400 }, { "epoch": 1.5758844204242393, "grad_norm": 2.089081287384033, "learning_rate": 7.760237899140028e-05, "loss": 3.3049, "step": 178450 }, { "epoch": 1.5763259683145234, "grad_norm": 3.1048781871795654, "learning_rate": 7.759079111590326e-05, "loss": 3.1276, "step": 178500 }, { "epoch": 1.5767675162048076, "grad_norm": 5.147495269775391, "learning_rate": 7.757920110925159e-05, "loss": 3.19, "step": 178550 }, { "epoch": 1.5772090640950918, "grad_norm": 5.037656784057617, "learning_rate": 7.756784083594946e-05, "loss": 3.3464, "step": 178600 }, { "epoch": 1.577650611985376, "grad_norm": 1.2267601490020752, "learning_rate": 7.755624661225285e-05, "loss": 3.5405, "step": 178650 }, { "epoch": 1.57809215987566, "grad_norm": 0.7518936991691589, "learning_rate": 7.754465026006984e-05, "loss": 3.4172, "step": 178700 }, { "epoch": 1.5785337077659443, "grad_norm": 2.8501391410827637, "learning_rate": 7.75330517802962e-05, "loss": 3.3903, "step": 178750 }, { "epoch": 1.5789752556562284, "grad_norm": 1.8526347875595093, "learning_rate": 7.752145117382777e-05, "loss": 3.047, "step": 178800 }, { "epoch": 1.5794168035465126, "grad_norm": 1.4192283153533936, "learning_rate": 7.750984844156061e-05, "loss": 3.1864, "step": 178850 }, { "epoch": 1.5798583514367968, "grad_norm": 0.8733697533607483, "learning_rate": 7.749824358439093e-05, "loss": 3.2894, "step": 178900 }, { "epoch": 1.580299899327081, "grad_norm": 2.241514205932617, "learning_rate": 7.748663660321511e-05, "loss": 3.0506, "step": 178950 }, { "epoch": 1.580741447217365, "grad_norm": 4.648041725158691, "learning_rate": 
7.74750274989297e-05, "loss": 3.3081, "step": 179000 }, { "epoch": 1.5811829951076493, "grad_norm": 2.8296263217926025, "learning_rate": 7.74634162724314e-05, "loss": 3.2342, "step": 179050 }, { "epoch": 1.5816245429979334, "grad_norm": 2.477835178375244, "learning_rate": 7.745180292461707e-05, "loss": 2.7343, "step": 179100 }, { "epoch": 1.5820660908882176, "grad_norm": 1.494752287864685, "learning_rate": 7.744018745638375e-05, "loss": 3.4856, "step": 179150 }, { "epoch": 1.5825076387785018, "grad_norm": 3.970940589904785, "learning_rate": 7.742856986862864e-05, "loss": 3.1116, "step": 179200 }, { "epoch": 1.5829491866687861, "grad_norm": 2.4596049785614014, "learning_rate": 7.741695016224908e-05, "loss": 2.8419, "step": 179250 }, { "epoch": 1.5833907345590703, "grad_norm": 1.8739557266235352, "learning_rate": 7.740532833814263e-05, "loss": 3.136, "step": 179300 }, { "epoch": 1.5838322824493545, "grad_norm": 1.3350729942321777, "learning_rate": 7.739370439720693e-05, "loss": 3.5008, "step": 179350 }, { "epoch": 1.5842738303396386, "grad_norm": 5.471927165985107, "learning_rate": 7.738207834033987e-05, "loss": 3.4203, "step": 179400 }, { "epoch": 1.5847153782299228, "grad_norm": 2.8407082557678223, "learning_rate": 7.737045016843944e-05, "loss": 3.3583, "step": 179450 }, { "epoch": 1.585156926120207, "grad_norm": 2.555143356323242, "learning_rate": 7.735881988240386e-05, "loss": 3.4862, "step": 179500 }, { "epoch": 1.5855984740104911, "grad_norm": 3.2967026233673096, "learning_rate": 7.734718748313143e-05, "loss": 3.1858, "step": 179550 }, { "epoch": 1.5860400219007753, "grad_norm": 0.6562075614929199, "learning_rate": 7.733555297152064e-05, "loss": 3.2385, "step": 179600 }, { "epoch": 1.5864815697910597, "grad_norm": 1.7235629558563232, "learning_rate": 7.73239163484702e-05, "loss": 3.3023, "step": 179650 }, { "epoch": 1.5869231176813439, "grad_norm": 3.370729446411133, "learning_rate": 7.731227761487893e-05, "loss": 3.463, "step": 179700 }, { "epoch": 
1.587364665571628, "grad_norm": 4.0230207443237305, "learning_rate": 7.73006367716458e-05, "loss": 3.0753, "step": 179750 }, { "epoch": 1.5878062134619122, "grad_norm": 3.8130829334259033, "learning_rate": 7.728899381966999e-05, "loss": 2.8752, "step": 179800 }, { "epoch": 1.5882477613521964, "grad_norm": 4.115664482116699, "learning_rate": 7.72773487598508e-05, "loss": 3.072, "step": 179850 }, { "epoch": 1.5886893092424805, "grad_norm": 1.7616603374481201, "learning_rate": 7.726570159308774e-05, "loss": 3.1123, "step": 179900 }, { "epoch": 1.5891308571327647, "grad_norm": 3.976717472076416, "learning_rate": 7.725405232028044e-05, "loss": 3.1311, "step": 179950 }, { "epoch": 1.5895724050230489, "grad_norm": 1.6560734510421753, "learning_rate": 7.724240094232868e-05, "loss": 3.2467, "step": 180000 }, { "epoch": 1.5895724050230489, "eval_asr_loss": 0.905089343649495, "eval_loss": 2.9030532836914062, "eval_runtime": 20.6595, "eval_samples_per_second": 37.174, "eval_steps_per_second": 9.294, "eval_tts_loss": 5.932771591964687, "step": 180000 }, { "epoch": 1.590013952913333, "grad_norm": 3.773517608642578, "learning_rate": 7.723074746013248e-05, "loss": 3.554, "step": 180050 }, { "epoch": 1.5904555008036172, "grad_norm": 3.9356374740600586, "learning_rate": 7.721909187459193e-05, "loss": 3.2263, "step": 180100 }, { "epoch": 1.5908970486939014, "grad_norm": 4.545897483825684, "learning_rate": 7.720743418660736e-05, "loss": 3.3664, "step": 180150 }, { "epoch": 1.5913385965841855, "grad_norm": 3.460737466812134, "learning_rate": 7.719577439707921e-05, "loss": 3.0415, "step": 180200 }, { "epoch": 1.5917801444744697, "grad_norm": 1.8591420650482178, "learning_rate": 7.718411250690808e-05, "loss": 3.3958, "step": 180250 }, { "epoch": 1.5922216923647539, "grad_norm": 3.867363929748535, "learning_rate": 7.717244851699481e-05, "loss": 3.0154, "step": 180300 }, { "epoch": 1.592663240255038, "grad_norm": 1.8669315576553345, "learning_rate": 7.716078242824028e-05, "loss": 3.0492, 
"step": 180350 }, { "epoch": 1.5931047881453222, "grad_norm": 2.609985589981079, "learning_rate": 7.714911424154566e-05, "loss": 3.1507, "step": 180400 }, { "epoch": 1.5935463360356064, "grad_norm": 3.6491219997406006, "learning_rate": 7.713744395781214e-05, "loss": 3.1012, "step": 180450 }, { "epoch": 1.5939878839258905, "grad_norm": 1.0326428413391113, "learning_rate": 7.71257715779412e-05, "loss": 3.4021, "step": 180500 }, { "epoch": 1.5944294318161747, "grad_norm": 3.4464635848999023, "learning_rate": 7.711409710283443e-05, "loss": 3.2061, "step": 180550 }, { "epoch": 1.5948709797064589, "grad_norm": 3.594515085220337, "learning_rate": 7.71024205333936e-05, "loss": 3.0179, "step": 180600 }, { "epoch": 1.595312527596743, "grad_norm": 3.2505548000335693, "learning_rate": 7.709074187052059e-05, "loss": 2.9457, "step": 180650 }, { "epoch": 1.5957540754870272, "grad_norm": 3.6564249992370605, "learning_rate": 7.70790611151175e-05, "loss": 2.9484, "step": 180700 }, { "epoch": 1.5961956233773114, "grad_norm": 2.443648099899292, "learning_rate": 7.706737826808654e-05, "loss": 3.2759, "step": 180750 }, { "epoch": 1.5966371712675955, "grad_norm": 3.290959119796753, "learning_rate": 7.705569333033016e-05, "loss": 3.3899, "step": 180800 }, { "epoch": 1.59707871915788, "grad_norm": 4.142195224761963, "learning_rate": 7.704400630275089e-05, "loss": 3.1456, "step": 180850 }, { "epoch": 1.597520267048164, "grad_norm": 3.0236165523529053, "learning_rate": 7.703231718625143e-05, "loss": 3.1064, "step": 180900 }, { "epoch": 1.5979618149384482, "grad_norm": 1.8898965120315552, "learning_rate": 7.702062598173473e-05, "loss": 3.1881, "step": 180950 }, { "epoch": 1.5984033628287324, "grad_norm": 4.283849716186523, "learning_rate": 7.700893269010378e-05, "loss": 3.1001, "step": 181000 }, { "epoch": 1.5988449107190166, "grad_norm": 1.4696093797683716, "learning_rate": 7.699723731226182e-05, "loss": 3.0011, "step": 181050 }, { "epoch": 1.5992864586093007, "grad_norm": 
2.0414416790008545, "learning_rate": 7.698553984911223e-05, "loss": 3.6156, "step": 181100 }, { "epoch": 1.599728006499585, "grad_norm": 4.270824432373047, "learning_rate": 7.697384030155848e-05, "loss": 3.7908, "step": 181150 }, { "epoch": 1.600169554389869, "grad_norm": 6.27476167678833, "learning_rate": 7.69621386705043e-05, "loss": 3.1732, "step": 181200 }, { "epoch": 1.6006111022801535, "grad_norm": 2.570694923400879, "learning_rate": 7.695043495685356e-05, "loss": 3.2865, "step": 181250 }, { "epoch": 1.6010526501704376, "grad_norm": 3.351912260055542, "learning_rate": 7.693872916151025e-05, "loss": 3.4516, "step": 181300 }, { "epoch": 1.6014941980607218, "grad_norm": 2.8289058208465576, "learning_rate": 7.692702128537855e-05, "loss": 3.7611, "step": 181350 }, { "epoch": 1.601935745951006, "grad_norm": 5.322634696960449, "learning_rate": 7.691531132936277e-05, "loss": 3.226, "step": 181400 }, { "epoch": 1.6023772938412901, "grad_norm": 1.7941278219223022, "learning_rate": 7.690359929436743e-05, "loss": 3.2578, "step": 181450 }, { "epoch": 1.6028188417315743, "grad_norm": 0.7209708094596863, "learning_rate": 7.689188518129719e-05, "loss": 3.514, "step": 181500 }, { "epoch": 1.6032603896218585, "grad_norm": 6.211198806762695, "learning_rate": 7.688016899105685e-05, "loss": 2.9773, "step": 181550 }, { "epoch": 1.6037019375121426, "grad_norm": 0.9564201831817627, "learning_rate": 7.68684507245514e-05, "loss": 3.0722, "step": 181600 }, { "epoch": 1.6041434854024268, "grad_norm": 1.6184066534042358, "learning_rate": 7.685673038268596e-05, "loss": 2.9939, "step": 181650 }, { "epoch": 1.604585033292711, "grad_norm": 1.1458969116210938, "learning_rate": 7.684500796636585e-05, "loss": 3.48, "step": 181700 }, { "epoch": 1.6050265811829951, "grad_norm": 1.6181378364562988, "learning_rate": 7.68332834764965e-05, "loss": 2.8879, "step": 181750 }, { "epoch": 1.6054681290732793, "grad_norm": 6.061189651489258, "learning_rate": 7.682155691398355e-05, "loss": 3.367, "step": 
181800 }, { "epoch": 1.6059096769635635, "grad_norm": 3.899233818054199, "learning_rate": 7.680982827973275e-05, "loss": 3.1318, "step": 181850 }, { "epoch": 1.6063512248538476, "grad_norm": 3.1183807849884033, "learning_rate": 7.679809757465007e-05, "loss": 3.4564, "step": 181900 }, { "epoch": 1.6067927727441318, "grad_norm": 0.9795408844947815, "learning_rate": 7.678636479964159e-05, "loss": 3.5955, "step": 181950 }, { "epoch": 1.607234320634416, "grad_norm": 1.9428582191467285, "learning_rate": 7.677462995561357e-05, "loss": 3.2042, "step": 182000 }, { "epoch": 1.6076758685247001, "grad_norm": 3.8682286739349365, "learning_rate": 7.676289304347245e-05, "loss": 3.096, "step": 182050 }, { "epoch": 1.6081174164149843, "grad_norm": 1.1212639808654785, "learning_rate": 7.675115406412476e-05, "loss": 3.3886, "step": 182100 }, { "epoch": 1.6085589643052685, "grad_norm": 2.1383824348449707, "learning_rate": 7.673941301847727e-05, "loss": 3.1706, "step": 182150 }, { "epoch": 1.6090005121955526, "grad_norm": 1.5465537309646606, "learning_rate": 7.672766990743687e-05, "loss": 3.1482, "step": 182200 }, { "epoch": 1.6094420600858368, "grad_norm": 1.8144700527191162, "learning_rate": 7.67159247319106e-05, "loss": 3.2361, "step": 182250 }, { "epoch": 1.609883607976121, "grad_norm": 1.2655187845230103, "learning_rate": 7.67041774928057e-05, "loss": 2.5676, "step": 182300 }, { "epoch": 1.6103251558664051, "grad_norm": 2.401733636856079, "learning_rate": 7.669242819102953e-05, "loss": 3.006, "step": 182350 }, { "epoch": 1.6107667037566893, "grad_norm": 0.8709122538566589, "learning_rate": 7.668067682748963e-05, "loss": 3.1246, "step": 182400 }, { "epoch": 1.6112082516469737, "grad_norm": 0.2746553421020508, "learning_rate": 7.666892340309371e-05, "loss": 2.8896, "step": 182450 }, { "epoch": 1.6116497995372578, "grad_norm": 2.582265615463257, "learning_rate": 7.665716791874958e-05, "loss": 3.1229, "step": 182500 }, { "epoch": 1.612091347427542, "grad_norm": 2.9576897621154785, 
"learning_rate": 7.664541037536526e-05, "loss": 2.6456, "step": 182550 }, { "epoch": 1.6125328953178262, "grad_norm": 3.0392637252807617, "learning_rate": 7.663365077384898e-05, "loss": 3.5719, "step": 182600 }, { "epoch": 1.6129744432081103, "grad_norm": 1.836568832397461, "learning_rate": 7.6621889115109e-05, "loss": 3.2949, "step": 182650 }, { "epoch": 1.6134159910983945, "grad_norm": 1.3547306060791016, "learning_rate": 7.661012540005381e-05, "loss": 3.3838, "step": 182700 }, { "epoch": 1.6138575389886787, "grad_norm": 3.1797091960906982, "learning_rate": 7.659835962959211e-05, "loss": 3.1803, "step": 182750 }, { "epoch": 1.6142990868789628, "grad_norm": 1.4783034324645996, "learning_rate": 7.658659180463267e-05, "loss": 3.308, "step": 182800 }, { "epoch": 1.6147406347692472, "grad_norm": 3.789548873901367, "learning_rate": 7.657482192608445e-05, "loss": 3.2409, "step": 182850 }, { "epoch": 1.6151821826595314, "grad_norm": 5.076577186584473, "learning_rate": 7.65630499948566e-05, "loss": 2.4807, "step": 182900 }, { "epoch": 1.6156237305498156, "grad_norm": 4.224163055419922, "learning_rate": 7.655127601185837e-05, "loss": 2.828, "step": 182950 }, { "epoch": 1.6160652784400997, "grad_norm": 3.1117782592773438, "learning_rate": 7.653949997799924e-05, "loss": 2.9108, "step": 183000 }, { "epoch": 1.6160652784400997, "eval_asr_loss": 0.9132444849537853, "eval_loss": 2.907047986984253, "eval_runtime": 21.3259, "eval_samples_per_second": 36.013, "eval_steps_per_second": 9.003, "eval_tts_loss": 5.958474148858937, "step": 183000 }, { "epoch": 1.616506826330384, "grad_norm": 1.899177074432373, "learning_rate": 7.652772189418875e-05, "loss": 3.2806, "step": 183050 }, { "epoch": 1.616948374220668, "grad_norm": 4.129954814910889, "learning_rate": 7.651594176133672e-05, "loss": 2.7331, "step": 183100 }, { "epoch": 1.6173899221109522, "grad_norm": 1.1719375848770142, "learning_rate": 7.650415958035303e-05, "loss": 3.4316, "step": 183150 }, { "epoch": 1.6178314700012364, 
"grad_norm": 1.4842125177383423, "learning_rate": 7.649237535214776e-05, "loss": 3.2723, "step": 183200 }, { "epoch": 1.6182730178915206, "grad_norm": 2.0932915210723877, "learning_rate": 7.648058907763114e-05, "loss": 3.2312, "step": 183250 }, { "epoch": 1.6187145657818047, "grad_norm": 2.771362066268921, "learning_rate": 7.646880075771357e-05, "loss": 3.5672, "step": 183300 }, { "epoch": 1.619156113672089, "grad_norm": 1.0976365804672241, "learning_rate": 7.645724622062386e-05, "loss": 3.2644, "step": 183350 }, { "epoch": 1.619597661562373, "grad_norm": 2.213742971420288, "learning_rate": 7.644545385349884e-05, "loss": 3.3306, "step": 183400 }, { "epoch": 1.6200392094526572, "grad_norm": 4.350307464599609, "learning_rate": 7.643365944368675e-05, "loss": 3.603, "step": 183450 }, { "epoch": 1.6204807573429414, "grad_norm": 2.864485263824463, "learning_rate": 7.642186299209865e-05, "loss": 3.2446, "step": 183500 }, { "epoch": 1.6209223052332256, "grad_norm": 3.199735403060913, "learning_rate": 7.641006449964566e-05, "loss": 3.2124, "step": 183550 }, { "epoch": 1.6213638531235097, "grad_norm": 4.11868143081665, "learning_rate": 7.639826396723914e-05, "loss": 3.0056, "step": 183600 }, { "epoch": 1.621805401013794, "grad_norm": 1.4297511577606201, "learning_rate": 7.638646139579058e-05, "loss": 3.5371, "step": 183650 }, { "epoch": 1.622246948904078, "grad_norm": 2.8237781524658203, "learning_rate": 7.637465678621162e-05, "loss": 3.2139, "step": 183700 }, { "epoch": 1.6226884967943622, "grad_norm": 4.127041339874268, "learning_rate": 7.636308629230888e-05, "loss": 3.174, "step": 183750 }, { "epoch": 1.6231300446846464, "grad_norm": 3.040043592453003, "learning_rate": 7.635127764992194e-05, "loss": 3.4317, "step": 183800 }, { "epoch": 1.6235715925749306, "grad_norm": 2.9634487628936768, "learning_rate": 7.633946697212221e-05, "loss": 3.2735, "step": 183850 }, { "epoch": 1.6240131404652147, "grad_norm": 4.035878658294678, "learning_rate": 7.632765425982202e-05, "loss": 
3.3785, "step": 183900 }, { "epoch": 1.6244546883554989, "grad_norm": 3.683920383453369, "learning_rate": 7.631583951393382e-05, "loss": 3.1896, "step": 183950 }, { "epoch": 1.624896236245783, "grad_norm": 2.083125352859497, "learning_rate": 7.630402273537014e-05, "loss": 2.9794, "step": 184000 }, { "epoch": 1.6253377841360674, "grad_norm": 2.1921465396881104, "learning_rate": 7.629220392504377e-05, "loss": 3.3312, "step": 184050 }, { "epoch": 1.6257793320263516, "grad_norm": 1.8298650979995728, "learning_rate": 7.628038308386757e-05, "loss": 3.3576, "step": 184100 }, { "epoch": 1.6262208799166358, "grad_norm": 1.597791314125061, "learning_rate": 7.626856021275465e-05, "loss": 3.5873, "step": 184150 }, { "epoch": 1.62666242780692, "grad_norm": 3.174025774002075, "learning_rate": 7.62567353126182e-05, "loss": 3.1898, "step": 184200 }, { "epoch": 1.627103975697204, "grad_norm": 2.265221357345581, "learning_rate": 7.624490838437161e-05, "loss": 3.1095, "step": 184250 }, { "epoch": 1.6275455235874883, "grad_norm": 2.2410521507263184, "learning_rate": 7.623307942892839e-05, "loss": 2.921, "step": 184300 }, { "epoch": 1.6279870714777724, "grad_norm": 2.9074504375457764, "learning_rate": 7.622124844720224e-05, "loss": 3.3522, "step": 184350 }, { "epoch": 1.6284286193680566, "grad_norm": 2.2740726470947266, "learning_rate": 7.620941544010697e-05, "loss": 3.4447, "step": 184400 }, { "epoch": 1.628870167258341, "grad_norm": 3.019122838973999, "learning_rate": 7.619758040855664e-05, "loss": 3.183, "step": 184450 }, { "epoch": 1.6293117151486252, "grad_norm": 2.1911163330078125, "learning_rate": 7.618574335346536e-05, "loss": 3.0883, "step": 184500 }, { "epoch": 1.6297532630389093, "grad_norm": 1.324202299118042, "learning_rate": 7.617390427574744e-05, "loss": 3.1827, "step": 184550 }, { "epoch": 1.6301948109291935, "grad_norm": 2.130458116531372, "learning_rate": 7.61620631763174e-05, "loss": 3.4636, "step": 184600 }, { "epoch": 1.6306363588194777, "grad_norm": 
3.302278757095337, "learning_rate": 7.61502200560898e-05, "loss": 3.1299, "step": 184650 }, { "epoch": 1.6310779067097618, "grad_norm": 0.9419139623641968, "learning_rate": 7.613837491597944e-05, "loss": 3.2849, "step": 184700 }, { "epoch": 1.631519454600046, "grad_norm": 4.2170186042785645, "learning_rate": 7.612652775690127e-05, "loss": 2.7851, "step": 184750 }, { "epoch": 1.6319610024903302, "grad_norm": 3.18815541267395, "learning_rate": 7.611467857977039e-05, "loss": 3.372, "step": 184800 }, { "epoch": 1.6324025503806143, "grad_norm": 4.340141773223877, "learning_rate": 7.610282738550204e-05, "loss": 3.2953, "step": 184850 }, { "epoch": 1.6328440982708985, "grad_norm": 4.1585869789123535, "learning_rate": 7.60909741750116e-05, "loss": 2.9285, "step": 184900 }, { "epoch": 1.6332856461611827, "grad_norm": 2.83577299118042, "learning_rate": 7.607911894921467e-05, "loss": 3.2021, "step": 184950 }, { "epoch": 1.6337271940514668, "grad_norm": 1.7795286178588867, "learning_rate": 7.606726170902693e-05, "loss": 3.1474, "step": 185000 }, { "epoch": 1.634168741941751, "grad_norm": 2.461143732070923, "learning_rate": 7.605540245536427e-05, "loss": 2.8676, "step": 185050 }, { "epoch": 1.6346102898320352, "grad_norm": 1.4866169691085815, "learning_rate": 7.604354118914272e-05, "loss": 3.1229, "step": 185100 }, { "epoch": 1.6350518377223193, "grad_norm": 3.6347692012786865, "learning_rate": 7.603167791127845e-05, "loss": 3.7924, "step": 185150 }, { "epoch": 1.6354933856126035, "grad_norm": 2.398576259613037, "learning_rate": 7.60198126226878e-05, "loss": 3.2153, "step": 185200 }, { "epoch": 1.6359349335028877, "grad_norm": 2.5132200717926025, "learning_rate": 7.600794532428728e-05, "loss": 3.4534, "step": 185250 }, { "epoch": 1.6363764813931718, "grad_norm": 2.564957857131958, "learning_rate": 7.59960760169935e-05, "loss": 3.1164, "step": 185300 }, { "epoch": 1.636818029283456, "grad_norm": 4.661256313323975, "learning_rate": 7.598420470172331e-05, "loss": 3.5114, "step": 
185350 }, { "epoch": 1.6372595771737402, "grad_norm": 1.975266695022583, "learning_rate": 7.597233137939364e-05, "loss": 3.399, "step": 185400 }, { "epoch": 1.6377011250640243, "grad_norm": 3.3702497482299805, "learning_rate": 7.59604560509216e-05, "loss": 3.1902, "step": 185450 }, { "epoch": 1.6381426729543085, "grad_norm": 3.0457959175109863, "learning_rate": 7.594857871722449e-05, "loss": 2.8022, "step": 185500 }, { "epoch": 1.6385842208445927, "grad_norm": 3.148191213607788, "learning_rate": 7.59366993792197e-05, "loss": 3.1881, "step": 185550 }, { "epoch": 1.6390257687348768, "grad_norm": 2.3851609230041504, "learning_rate": 7.592481803782483e-05, "loss": 2.9736, "step": 185600 }, { "epoch": 1.6394673166251612, "grad_norm": 0.8741468191146851, "learning_rate": 7.591293469395758e-05, "loss": 3.0127, "step": 185650 }, { "epoch": 1.6399088645154454, "grad_norm": 4.1316046714782715, "learning_rate": 7.590104934853588e-05, "loss": 3.2769, "step": 185700 }, { "epoch": 1.6403504124057295, "grad_norm": 1.8304107189178467, "learning_rate": 7.588916200247775e-05, "loss": 3.5342, "step": 185750 }, { "epoch": 1.6407919602960137, "grad_norm": 2.1133339405059814, "learning_rate": 7.587727265670137e-05, "loss": 3.3722, "step": 185800 }, { "epoch": 1.6412335081862979, "grad_norm": 2.7472083568573, "learning_rate": 7.586538131212512e-05, "loss": 3.2366, "step": 185850 }, { "epoch": 1.641675056076582, "grad_norm": 2.5508406162261963, "learning_rate": 7.58534879696675e-05, "loss": 3.3287, "step": 185900 }, { "epoch": 1.6421166039668662, "grad_norm": 5.497042179107666, "learning_rate": 7.584159263024718e-05, "loss": 3.3166, "step": 185950 }, { "epoch": 1.6425581518571504, "grad_norm": 4.364591598510742, "learning_rate": 7.582969529478294e-05, "loss": 3.5261, "step": 186000 }, { "epoch": 1.6425581518571504, "eval_asr_loss": 0.9146858002053176, "eval_loss": 2.897829055786133, "eval_runtime": 21.1777, "eval_samples_per_second": 36.265, "eval_steps_per_second": 9.066, 
"eval_tts_loss": 5.956395547650511, "step": 186000 }, { "epoch": 1.6429996997474348, "grad_norm": 3.4542715549468994, "learning_rate": 7.581779596419378e-05, "loss": 2.9081, "step": 186050 }, { "epoch": 1.643441247637719, "grad_norm": 4.252161502838135, "learning_rate": 7.58058946393988e-05, "loss": 3.3154, "step": 186100 }, { "epoch": 1.643882795528003, "grad_norm": 3.3466477394104004, "learning_rate": 7.579399132131731e-05, "loss": 3.35, "step": 186150 }, { "epoch": 1.6443243434182873, "grad_norm": 3.145517349243164, "learning_rate": 7.578208601086868e-05, "loss": 3.3923, "step": 186200 }, { "epoch": 1.6447658913085714, "grad_norm": 3.68949556350708, "learning_rate": 7.577017870897253e-05, "loss": 3.0109, "step": 186250 }, { "epoch": 1.6452074391988556, "grad_norm": 1.4948679208755493, "learning_rate": 7.575826941654862e-05, "loss": 3.4221, "step": 186300 }, { "epoch": 1.6456489870891398, "grad_norm": 6.5033488273620605, "learning_rate": 7.574635813451684e-05, "loss": 2.666, "step": 186350 }, { "epoch": 1.646090534979424, "grad_norm": 2.8202450275421143, "learning_rate": 7.57344448637972e-05, "loss": 3.3709, "step": 186400 }, { "epoch": 1.646532082869708, "grad_norm": 1.8358458280563354, "learning_rate": 7.57225296053099e-05, "loss": 3.1007, "step": 186450 }, { "epoch": 1.6469736307599923, "grad_norm": 1.9176243543624878, "learning_rate": 7.571061235997531e-05, "loss": 3.1518, "step": 186500 }, { "epoch": 1.6474151786502764, "grad_norm": 0.955685555934906, "learning_rate": 7.569869312871396e-05, "loss": 3.6704, "step": 186550 }, { "epoch": 1.6478567265405606, "grad_norm": 4.65590763092041, "learning_rate": 7.568677191244646e-05, "loss": 2.8114, "step": 186600 }, { "epoch": 1.6482982744308448, "grad_norm": 1.1020005941390991, "learning_rate": 7.567484871209367e-05, "loss": 3.2136, "step": 186650 }, { "epoch": 1.648739822321129, "grad_norm": 2.0391685962677, "learning_rate": 7.566292352857651e-05, "loss": 3.4038, "step": 186700 }, { "epoch": 1.649181370211413, 
"grad_norm": 2.0860207080841064, "learning_rate": 7.565099636281614e-05, "loss": 3.4063, "step": 186750 }, { "epoch": 1.6496229181016973, "grad_norm": 2.8292300701141357, "learning_rate": 7.563906721573379e-05, "loss": 3.2938, "step": 186800 }, { "epoch": 1.6500644659919814, "grad_norm": 3.6155056953430176, "learning_rate": 7.562713608825094e-05, "loss": 3.0306, "step": 186850 }, { "epoch": 1.6505060138822656, "grad_norm": 1.792187213897705, "learning_rate": 7.561520298128913e-05, "loss": 2.9088, "step": 186900 }, { "epoch": 1.6509475617725498, "grad_norm": 3.0184032917022705, "learning_rate": 7.56032678957701e-05, "loss": 3.7706, "step": 186950 }, { "epoch": 1.651389109662834, "grad_norm": 1.4571894407272339, "learning_rate": 7.559133083261574e-05, "loss": 2.7708, "step": 187000 }, { "epoch": 1.651830657553118, "grad_norm": 1.022334337234497, "learning_rate": 7.557939179274807e-05, "loss": 3.2407, "step": 187050 }, { "epoch": 1.6522722054434023, "grad_norm": 2.326892137527466, "learning_rate": 7.556745077708928e-05, "loss": 3.0828, "step": 187100 }, { "epoch": 1.6527137533336864, "grad_norm": 7.504704475402832, "learning_rate": 7.555550778656176e-05, "loss": 2.9196, "step": 187150 }, { "epoch": 1.6531553012239706, "grad_norm": 1.7373300790786743, "learning_rate": 7.554356282208792e-05, "loss": 3.4106, "step": 187200 }, { "epoch": 1.653596849114255, "grad_norm": 7.9670538902282715, "learning_rate": 7.553185484267011e-05, "loss": 2.9811, "step": 187250 }, { "epoch": 1.6540383970045391, "grad_norm": 4.288424015045166, "learning_rate": 7.551990597250481e-05, "loss": 2.9887, "step": 187300 }, { "epoch": 1.6544799448948233, "grad_norm": 3.482987642288208, "learning_rate": 7.550795513114318e-05, "loss": 3.6097, "step": 187350 }, { "epoch": 1.6549214927851075, "grad_norm": 2.8395724296569824, "learning_rate": 7.549600231950832e-05, "loss": 3.4265, "step": 187400 }, { "epoch": 1.6553630406753916, "grad_norm": 2.013218641281128, "learning_rate": 7.548404753852349e-05, 
"loss": 3.6109, "step": 187450 }, { "epoch": 1.6558045885656758, "grad_norm": 1.625708818435669, "learning_rate": 7.547209078911207e-05, "loss": 3.0623, "step": 187500 }, { "epoch": 1.65624613645596, "grad_norm": 1.8119765520095825, "learning_rate": 7.546013207219765e-05, "loss": 3.1547, "step": 187550 }, { "epoch": 1.6566876843462441, "grad_norm": 5.0249481201171875, "learning_rate": 7.54481713887039e-05, "loss": 2.7106, "step": 187600 }, { "epoch": 1.6571292322365285, "grad_norm": 1.6827908754348755, "learning_rate": 7.543620873955472e-05, "loss": 3.3438, "step": 187650 }, { "epoch": 1.6575707801268127, "grad_norm": 1.1493171453475952, "learning_rate": 7.542424412567412e-05, "loss": 2.9085, "step": 187700 }, { "epoch": 1.6580123280170969, "grad_norm": 1.2166669368743896, "learning_rate": 7.541227754798624e-05, "loss": 2.9538, "step": 187750 }, { "epoch": 1.658453875907381, "grad_norm": 3.9026665687561035, "learning_rate": 7.540030900741539e-05, "loss": 3.4233, "step": 187800 }, { "epoch": 1.6588954237976652, "grad_norm": 2.1174581050872803, "learning_rate": 7.538833850488609e-05, "loss": 3.0982, "step": 187850 }, { "epoch": 1.6593369716879494, "grad_norm": 1.8407869338989258, "learning_rate": 7.53763660413229e-05, "loss": 3.239, "step": 187900 }, { "epoch": 1.6597785195782335, "grad_norm": 4.55293607711792, "learning_rate": 7.536439161765063e-05, "loss": 3.4121, "step": 187950 }, { "epoch": 1.6602200674685177, "grad_norm": 2.4583520889282227, "learning_rate": 7.53524152347942e-05, "loss": 3.2692, "step": 188000 }, { "epoch": 1.6606616153588019, "grad_norm": 3.5421550273895264, "learning_rate": 7.534043689367867e-05, "loss": 3.1821, "step": 188050 }, { "epoch": 1.661103163249086, "grad_norm": 3.7831602096557617, "learning_rate": 7.532845659522927e-05, "loss": 3.74, "step": 188100 }, { "epoch": 1.6615447111393702, "grad_norm": 1.2975730895996094, "learning_rate": 7.531647434037137e-05, "loss": 2.9245, "step": 188150 }, { "epoch": 1.6619862590296544, "grad_norm": 
3.5446507930755615, "learning_rate": 7.530449013003051e-05, "loss": 3.12, "step": 188200 }, { "epoch": 1.6624278069199385, "grad_norm": 1.0230252742767334, "learning_rate": 7.529250396513237e-05, "loss": 3.3197, "step": 188250 }, { "epoch": 1.6628693548102227, "grad_norm": 2.7343640327453613, "learning_rate": 7.528051584660276e-05, "loss": 3.3, "step": 188300 }, { "epoch": 1.6633109027005069, "grad_norm": 0.8870599269866943, "learning_rate": 7.526852577536769e-05, "loss": 3.054, "step": 188350 }, { "epoch": 1.663752450590791, "grad_norm": 3.2465200424194336, "learning_rate": 7.525653375235327e-05, "loss": 2.9143, "step": 188400 }, { "epoch": 1.6641939984810752, "grad_norm": 3.82564377784729, "learning_rate": 7.524453977848578e-05, "loss": 3.2014, "step": 188450 }, { "epoch": 1.6646355463713594, "grad_norm": 1.1681989431381226, "learning_rate": 7.523254385469166e-05, "loss": 3.2335, "step": 188500 }, { "epoch": 1.6650770942616435, "grad_norm": 1.4739675521850586, "learning_rate": 7.522054598189747e-05, "loss": 2.8809, "step": 188550 }, { "epoch": 1.6655186421519277, "grad_norm": 2.9741079807281494, "learning_rate": 7.520854616103002e-05, "loss": 3.5088, "step": 188600 }, { "epoch": 1.6659601900422119, "grad_norm": 2.840975284576416, "learning_rate": 7.519654439301609e-05, "loss": 3.3214, "step": 188650 }, { "epoch": 1.666401737932496, "grad_norm": 3.7124547958374023, "learning_rate": 7.51845406787828e-05, "loss": 3.6984, "step": 188700 }, { "epoch": 1.6668432858227802, "grad_norm": 6.617938995361328, "learning_rate": 7.517253501925727e-05, "loss": 3.3593, "step": 188750 }, { "epoch": 1.6672848337130646, "grad_norm": 1.4384989738464355, "learning_rate": 7.51605274153669e-05, "loss": 3.458, "step": 188800 }, { "epoch": 1.6677263816033487, "grad_norm": 4.336093902587891, "learning_rate": 7.514851786803913e-05, "loss": 3.294, "step": 188850 }, { "epoch": 1.668167929493633, "grad_norm": 1.1996614933013916, "learning_rate": 7.51365063782016e-05, "loss": 3.3519, "step": 
188900 }, { "epoch": 1.668609477383917, "grad_norm": 1.576010823249817, "learning_rate": 7.512449294678212e-05, "loss": 3.2801, "step": 188950 }, { "epoch": 1.6690510252742012, "grad_norm": 0.6536944508552551, "learning_rate": 7.511247757470859e-05, "loss": 3.2111, "step": 189000 }, { "epoch": 1.6690510252742012, "eval_asr_loss": 0.9149196284461532, "eval_loss": 2.8986892700195312, "eval_runtime": 20.5204, "eval_samples_per_second": 37.426, "eval_steps_per_second": 9.357, "eval_tts_loss": 5.990396785670996, "step": 189000 }, { "epoch": 1.6694925731644854, "grad_norm": 3.608792543411255, "learning_rate": 7.510046026290912e-05, "loss": 3.178, "step": 189050 }, { "epoch": 1.6699341210547696, "grad_norm": 4.6743245124816895, "learning_rate": 7.508844101231195e-05, "loss": 2.9165, "step": 189100 }, { "epoch": 1.6703756689450537, "grad_norm": 1.1375796794891357, "learning_rate": 7.507641982384543e-05, "loss": 3.3253, "step": 189150 }, { "epoch": 1.6708172168353381, "grad_norm": 0.7114461660385132, "learning_rate": 7.506439669843816e-05, "loss": 3.0497, "step": 189200 }, { "epoch": 1.6712587647256223, "grad_norm": 4.1343889236450195, "learning_rate": 7.505237163701877e-05, "loss": 3.3836, "step": 189250 }, { "epoch": 1.6717003126159065, "grad_norm": 2.8716280460357666, "learning_rate": 7.504034464051609e-05, "loss": 3.3779, "step": 189300 }, { "epoch": 1.6721418605061906, "grad_norm": 1.575553059577942, "learning_rate": 7.502831570985915e-05, "loss": 3.185, "step": 189350 }, { "epoch": 1.6725834083964748, "grad_norm": 2.8956246376037598, "learning_rate": 7.501628484597702e-05, "loss": 3.5845, "step": 189400 }, { "epoch": 1.673024956286759, "grad_norm": 4.885554790496826, "learning_rate": 7.500425204979906e-05, "loss": 3.3725, "step": 189450 }, { "epoch": 1.6734665041770431, "grad_norm": 6.7091264724731445, "learning_rate": 7.499221732225464e-05, "loss": 3.3188, "step": 189500 }, { "epoch": 1.6739080520673273, "grad_norm": 1.5392342805862427, "learning_rate": 
7.498018066427335e-05, "loss": 3.2148, "step": 189550 }, { "epoch": 1.6743495999576115, "grad_norm": 1.949496865272522, "learning_rate": 7.496814207678494e-05, "loss": 3.519, "step": 189600 }, { "epoch": 1.6747911478478956, "grad_norm": 2.188905715942383, "learning_rate": 7.495610156071927e-05, "loss": 2.8393, "step": 189650 }, { "epoch": 1.6752326957381798, "grad_norm": 2.987746477127075, "learning_rate": 7.494405911700638e-05, "loss": 3.0112, "step": 189700 }, { "epoch": 1.675674243628464, "grad_norm": 1.8022232055664062, "learning_rate": 7.493201474657646e-05, "loss": 3.2487, "step": 189750 }, { "epoch": 1.6761157915187481, "grad_norm": 0.8079404830932617, "learning_rate": 7.491996845035979e-05, "loss": 3.4419, "step": 189800 }, { "epoch": 1.6765573394090323, "grad_norm": 2.956101655960083, "learning_rate": 7.490792022928689e-05, "loss": 3.4542, "step": 189850 }, { "epoch": 1.6769988872993165, "grad_norm": 1.22069251537323, "learning_rate": 7.489587008428834e-05, "loss": 3.2008, "step": 189900 }, { "epoch": 1.6774404351896006, "grad_norm": 1.820133924484253, "learning_rate": 7.488381801629498e-05, "loss": 3.1799, "step": 189950 }, { "epoch": 1.6778819830798848, "grad_norm": 1.816726565361023, "learning_rate": 7.487176402623766e-05, "loss": 3.0303, "step": 190000 }, { "epoch": 1.678323530970169, "grad_norm": 2.491438150405884, "learning_rate": 7.485970811504748e-05, "loss": 3.1211, "step": 190050 }, { "epoch": 1.6787650788604531, "grad_norm": 1.7439849376678467, "learning_rate": 7.484765028365564e-05, "loss": 2.8709, "step": 190100 }, { "epoch": 1.6792066267507373, "grad_norm": 3.090697765350342, "learning_rate": 7.483559053299354e-05, "loss": 3.8603, "step": 190150 }, { "epoch": 1.6796481746410215, "grad_norm": 4.9195122718811035, "learning_rate": 7.482352886399265e-05, "loss": 3.1793, "step": 190200 }, { "epoch": 1.6800897225313056, "grad_norm": 6.847498416900635, "learning_rate": 7.481146527758468e-05, "loss": 3.0936, "step": 190250 }, { "epoch": 
1.6805312704215898, "grad_norm": 2.2523751258850098, "learning_rate": 7.479939977470137e-05, "loss": 2.8686, "step": 190300 }, { "epoch": 1.680972818311874, "grad_norm": 3.1140174865722656, "learning_rate": 7.478733235627476e-05, "loss": 3.4243, "step": 190350 }, { "epoch": 1.6814143662021583, "grad_norm": 6.4473066329956055, "learning_rate": 7.477526302323691e-05, "loss": 2.9147, "step": 190400 }, { "epoch": 1.6818559140924425, "grad_norm": 5.672176837921143, "learning_rate": 7.476319177652005e-05, "loss": 2.9304, "step": 190450 }, { "epoch": 1.6822974619827267, "grad_norm": 2.4453301429748535, "learning_rate": 7.475111861705664e-05, "loss": 3.141, "step": 190500 }, { "epoch": 1.6827390098730108, "grad_norm": 4.314248561859131, "learning_rate": 7.473904354577918e-05, "loss": 2.9855, "step": 190550 }, { "epoch": 1.683180557763295, "grad_norm": 4.665952205657959, "learning_rate": 7.472696656362042e-05, "loss": 3.5754, "step": 190600 }, { "epoch": 1.6836221056535792, "grad_norm": 3.7188048362731934, "learning_rate": 7.471488767151315e-05, "loss": 3.052, "step": 190650 }, { "epoch": 1.6840636535438633, "grad_norm": 2.8768093585968018, "learning_rate": 7.470280687039037e-05, "loss": 3.331, "step": 190700 }, { "epoch": 1.6845052014341475, "grad_norm": 2.511436700820923, "learning_rate": 7.469072416118526e-05, "loss": 3.256, "step": 190750 }, { "epoch": 1.684946749324432, "grad_norm": 2.553476572036743, "learning_rate": 7.467863954483105e-05, "loss": 2.9051, "step": 190800 }, { "epoch": 1.685388297214716, "grad_norm": 2.4301607608795166, "learning_rate": 7.466655302226122e-05, "loss": 3.1829, "step": 190850 }, { "epoch": 1.6858298451050002, "grad_norm": 2.7761809825897217, "learning_rate": 7.465446459440935e-05, "loss": 3.1125, "step": 190900 }, { "epoch": 1.6862713929952844, "grad_norm": 4.545463562011719, "learning_rate": 7.464237426220914e-05, "loss": 3.0704, "step": 190950 }, { "epoch": 1.6867129408855686, "grad_norm": 2.1818442344665527, "learning_rate": 
7.463028202659448e-05, "loss": 3.6544, "step": 191000 }, { "epoch": 1.6871544887758527, "grad_norm": 3.3841798305511475, "learning_rate": 7.461818788849938e-05, "loss": 3.333, "step": 191050 }, { "epoch": 1.687596036666137, "grad_norm": 1.6875982284545898, "learning_rate": 7.460609184885802e-05, "loss": 3.605, "step": 191100 }, { "epoch": 1.688037584556421, "grad_norm": 2.939258337020874, "learning_rate": 7.459399390860476e-05, "loss": 2.9491, "step": 191150 }, { "epoch": 1.6884791324467052, "grad_norm": 4.111961364746094, "learning_rate": 7.458189406867398e-05, "loss": 3.0345, "step": 191200 }, { "epoch": 1.6889206803369894, "grad_norm": 4.00358772277832, "learning_rate": 7.456979233000034e-05, "loss": 2.8185, "step": 191250 }, { "epoch": 1.6893622282272736, "grad_norm": 2.474588394165039, "learning_rate": 7.45576886935186e-05, "loss": 3.2167, "step": 191300 }, { "epoch": 1.6898037761175577, "grad_norm": 2.620952606201172, "learning_rate": 7.454558316016364e-05, "loss": 3.115, "step": 191350 }, { "epoch": 1.6902453240078419, "grad_norm": 5.4351701736450195, "learning_rate": 7.453347573087052e-05, "loss": 3.096, "step": 191400 }, { "epoch": 1.690686871898126, "grad_norm": 0.8584945797920227, "learning_rate": 7.452136640657445e-05, "loss": 3.0888, "step": 191450 }, { "epoch": 1.6911284197884102, "grad_norm": 0.5445427894592285, "learning_rate": 7.450925518821075e-05, "loss": 3.0229, "step": 191500 }, { "epoch": 1.6915699676786944, "grad_norm": 1.948606014251709, "learning_rate": 7.449714207671494e-05, "loss": 3.092, "step": 191550 }, { "epoch": 1.6920115155689786, "grad_norm": 3.2247259616851807, "learning_rate": 7.448502707302259e-05, "loss": 2.8165, "step": 191600 }, { "epoch": 1.6924530634592627, "grad_norm": 4.422319412231445, "learning_rate": 7.447291017806957e-05, "loss": 3.4485, "step": 191650 }, { "epoch": 1.6928946113495469, "grad_norm": 1.4744811058044434, "learning_rate": 7.446079139279174e-05, "loss": 3.064, "step": 191700 }, { "epoch": 
1.693336159239831, "grad_norm": 2.2750120162963867, "learning_rate": 7.444867071812521e-05, "loss": 3.4599, "step": 191750 }, { "epoch": 1.6937777071301152, "grad_norm": 1.9034446477890015, "learning_rate": 7.443654815500617e-05, "loss": 2.9978, "step": 191800 }, { "epoch": 1.6942192550203994, "grad_norm": 2.9149630069732666, "learning_rate": 7.442442370437099e-05, "loss": 3.5718, "step": 191850 }, { "epoch": 1.6946608029106836, "grad_norm": 2.397642135620117, "learning_rate": 7.441229736715623e-05, "loss": 3.0129, "step": 191900 }, { "epoch": 1.6951023508009677, "grad_norm": 4.1654486656188965, "learning_rate": 7.440016914429848e-05, "loss": 2.7316, "step": 191950 }, { "epoch": 1.695543898691252, "grad_norm": 3.3984122276306152, "learning_rate": 7.438803903673459e-05, "loss": 3.4284, "step": 192000 }, { "epoch": 1.695543898691252, "eval_asr_loss": 0.9019791327945982, "eval_loss": 2.891016721725464, "eval_runtime": 20.3988, "eval_samples_per_second": 37.649, "eval_steps_per_second": 9.412, "eval_tts_loss": 5.950324852027226, "step": 192000 }, { "epoch": 1.6959854465815363, "grad_norm": 3.025519609451294, "learning_rate": 7.437590704540147e-05, "loss": 3.2471, "step": 192050 }, { "epoch": 1.6964269944718204, "grad_norm": 3.1683413982391357, "learning_rate": 7.436377317123624e-05, "loss": 3.032, "step": 192100 }, { "epoch": 1.6968685423621046, "grad_norm": 1.259204387664795, "learning_rate": 7.435163741517614e-05, "loss": 3.3301, "step": 192150 }, { "epoch": 1.6973100902523888, "grad_norm": 2.364607572555542, "learning_rate": 7.433949977815852e-05, "loss": 3.5812, "step": 192200 }, { "epoch": 1.697751638142673, "grad_norm": 2.005537748336792, "learning_rate": 7.432736026112097e-05, "loss": 3.3359, "step": 192250 }, { "epoch": 1.698193186032957, "grad_norm": 3.5940768718719482, "learning_rate": 7.431546171133245e-05, "loss": 3.0394, "step": 192300 }, { "epoch": 1.6986347339232413, "grad_norm": 1.2887500524520874, "learning_rate": 7.43033184746218e-05, "loss": 3.191, 
"step": 192350 }, { "epoch": 1.6990762818135257, "grad_norm": 1.4926159381866455, "learning_rate": 7.429117336068591e-05, "loss": 3.4094, "step": 192400 }, { "epoch": 1.6995178297038098, "grad_norm": 2.109112501144409, "learning_rate": 7.427902637046287e-05, "loss": 3.4759, "step": 192450 }, { "epoch": 1.699959377594094, "grad_norm": 1.6524863243103027, "learning_rate": 7.426687750489089e-05, "loss": 3.1766, "step": 192500 }, { "epoch": 1.7004009254843782, "grad_norm": 3.5204215049743652, "learning_rate": 7.425472676490842e-05, "loss": 3.17, "step": 192550 }, { "epoch": 1.7008424733746623, "grad_norm": 1.593204140663147, "learning_rate": 7.424257415145399e-05, "loss": 3.2035, "step": 192600 }, { "epoch": 1.7012840212649465, "grad_norm": 1.7033207416534424, "learning_rate": 7.423041966546626e-05, "loss": 3.3376, "step": 192650 }, { "epoch": 1.7017255691552307, "grad_norm": 4.6197733879089355, "learning_rate": 7.42182633078841e-05, "loss": 3.334, "step": 192700 }, { "epoch": 1.7021671170455148, "grad_norm": 2.908355236053467, "learning_rate": 7.420610507964644e-05, "loss": 3.0815, "step": 192750 }, { "epoch": 1.702608664935799, "grad_norm": 1.182983160018921, "learning_rate": 7.419394498169244e-05, "loss": 2.8714, "step": 192800 }, { "epoch": 1.7030502128260832, "grad_norm": 4.0162224769592285, "learning_rate": 7.418178301496134e-05, "loss": 2.9672, "step": 192850 }, { "epoch": 1.7034917607163673, "grad_norm": 3.590345859527588, "learning_rate": 7.416961918039256e-05, "loss": 3.1313, "step": 192900 }, { "epoch": 1.7039333086066515, "grad_norm": 0.6673361659049988, "learning_rate": 7.415745347892565e-05, "loss": 3.1233, "step": 192950 }, { "epoch": 1.7043748564969357, "grad_norm": 2.611398935317993, "learning_rate": 7.414528591150032e-05, "loss": 3.2982, "step": 193000 }, { "epoch": 1.7048164043872198, "grad_norm": 3.113255739212036, "learning_rate": 7.413311647905638e-05, "loss": 3.0646, "step": 193050 }, { "epoch": 1.705257952277504, "grad_norm": 3.388859987258911, 
"learning_rate": 7.412094518253385e-05, "loss": 3.4217, "step": 193100 }, { "epoch": 1.7056995001677882, "grad_norm": 3.73885440826416, "learning_rate": 7.410877202287282e-05, "loss": 3.0224, "step": 193150 }, { "epoch": 1.7061410480580723, "grad_norm": 2.581843137741089, "learning_rate": 7.409659700101362e-05, "loss": 3.3819, "step": 193200 }, { "epoch": 1.7065825959483565, "grad_norm": 3.6114909648895264, "learning_rate": 7.408442011789661e-05, "loss": 2.8756, "step": 193250 }, { "epoch": 1.7070241438386407, "grad_norm": 1.801611304283142, "learning_rate": 7.407224137446239e-05, "loss": 3.2742, "step": 193300 }, { "epoch": 1.7074656917289248, "grad_norm": 2.8728644847869873, "learning_rate": 7.406006077165165e-05, "loss": 3.0712, "step": 193350 }, { "epoch": 1.707907239619209, "grad_norm": 3.180121660232544, "learning_rate": 7.404787831040525e-05, "loss": 3.21, "step": 193400 }, { "epoch": 1.7083487875094931, "grad_norm": 3.0456016063690186, "learning_rate": 7.403569399166415e-05, "loss": 3.0706, "step": 193450 }, { "epoch": 1.7087903353997773, "grad_norm": 1.180357813835144, "learning_rate": 7.402350781636951e-05, "loss": 2.9831, "step": 193500 }, { "epoch": 1.7092318832900615, "grad_norm": 2.2708914279937744, "learning_rate": 7.40113197854626e-05, "loss": 3.3696, "step": 193550 }, { "epoch": 1.7096734311803459, "grad_norm": 3.7013955116271973, "learning_rate": 7.399912989988487e-05, "loss": 2.7012, "step": 193600 }, { "epoch": 1.71011497907063, "grad_norm": 1.7447788715362549, "learning_rate": 7.398693816057783e-05, "loss": 3.5764, "step": 193650 }, { "epoch": 1.7105565269609142, "grad_norm": 1.3203649520874023, "learning_rate": 7.397474456848324e-05, "loss": 3.1377, "step": 193700 }, { "epoch": 1.7109980748511984, "grad_norm": 3.331073045730591, "learning_rate": 7.396254912454292e-05, "loss": 3.1011, "step": 193750 }, { "epoch": 1.7114396227414825, "grad_norm": 2.029482126235962, "learning_rate": 7.395035182969889e-05, "loss": 3.3574, "step": 193800 }, { 
"epoch": 1.7118811706317667, "grad_norm": 1.7867449522018433, "learning_rate": 7.393815268489328e-05, "loss": 3.1734, "step": 193850 }, { "epoch": 1.7123227185220509, "grad_norm": 2.170999526977539, "learning_rate": 7.392595169106834e-05, "loss": 3.3209, "step": 193900 }, { "epoch": 1.712764266412335, "grad_norm": 3.1686041355133057, "learning_rate": 7.391374884916654e-05, "loss": 3.1962, "step": 193950 }, { "epoch": 1.7132058143026194, "grad_norm": 3.2382314205169678, "learning_rate": 7.390154416013042e-05, "loss": 3.3819, "step": 194000 }, { "epoch": 1.7136473621929036, "grad_norm": 2.528313398361206, "learning_rate": 7.388933762490268e-05, "loss": 3.1662, "step": 194050 }, { "epoch": 1.7140889100831878, "grad_norm": 2.6687514781951904, "learning_rate": 7.38771292444262e-05, "loss": 2.9224, "step": 194100 }, { "epoch": 1.714530457973472, "grad_norm": 2.152829647064209, "learning_rate": 7.386491901964394e-05, "loss": 3.3078, "step": 194150 }, { "epoch": 1.714972005863756, "grad_norm": 3.414506673812866, "learning_rate": 7.385270695149906e-05, "loss": 3.1442, "step": 194200 }, { "epoch": 1.7154135537540403, "grad_norm": 1.722166895866394, "learning_rate": 7.384049304093485e-05, "loss": 3.3816, "step": 194250 }, { "epoch": 1.7158551016443244, "grad_norm": 3.0730419158935547, "learning_rate": 7.382827728889468e-05, "loss": 3.1881, "step": 194300 }, { "epoch": 1.7162966495346086, "grad_norm": 0.8389607667922974, "learning_rate": 7.381605969632218e-05, "loss": 3.1474, "step": 194350 }, { "epoch": 1.7167381974248928, "grad_norm": 1.5451440811157227, "learning_rate": 7.380384026416102e-05, "loss": 2.7997, "step": 194400 }, { "epoch": 1.717179745315177, "grad_norm": 1.2447178363800049, "learning_rate": 7.379161899335504e-05, "loss": 3.1987, "step": 194450 }, { "epoch": 1.717621293205461, "grad_norm": 1.0391019582748413, "learning_rate": 7.377939588484823e-05, "loss": 3.5185, "step": 194500 }, { "epoch": 1.7180628410957453, "grad_norm": 1.7594729661941528, "learning_rate": 
7.376717093958477e-05, "loss": 3.1056, "step": 194550 }, { "epoch": 1.7185043889860294, "grad_norm": 4.89455509185791, "learning_rate": 7.375494415850885e-05, "loss": 3.3591, "step": 194600 }, { "epoch": 1.7189459368763136, "grad_norm": 1.501932144165039, "learning_rate": 7.374271554256495e-05, "loss": 3.4794, "step": 194650 }, { "epoch": 1.7193874847665978, "grad_norm": 3.946047067642212, "learning_rate": 7.373048509269759e-05, "loss": 2.8626, "step": 194700 }, { "epoch": 1.719829032656882, "grad_norm": 3.7815029621124268, "learning_rate": 7.371849747346553e-05, "loss": 3.3103, "step": 194750 }, { "epoch": 1.720270580547166, "grad_norm": 1.0685265064239502, "learning_rate": 7.370626339521693e-05, "loss": 3.1576, "step": 194800 }, { "epoch": 1.7207121284374503, "grad_norm": 2.738694190979004, "learning_rate": 7.369402748586054e-05, "loss": 3.3068, "step": 194850 }, { "epoch": 1.7211536763277344, "grad_norm": 2.3966355323791504, "learning_rate": 7.368178974634143e-05, "loss": 3.4519, "step": 194900 }, { "epoch": 1.7215952242180186, "grad_norm": 2.5047898292541504, "learning_rate": 7.366955017760488e-05, "loss": 3.3452, "step": 194950 }, { "epoch": 1.7220367721083027, "grad_norm": 1.0891519784927368, "learning_rate": 7.36573087805963e-05, "loss": 3.4548, "step": 195000 }, { "epoch": 1.7220367721083027, "eval_asr_loss": 0.9187432370274967, "eval_loss": 2.88369083404541, "eval_runtime": 20.6655, "eval_samples_per_second": 37.163, "eval_steps_per_second": 9.291, "eval_tts_loss": 5.93669020019495, "step": 195000 }, { "epoch": 1.722478319998587, "grad_norm": 2.0451600551605225, "learning_rate": 7.364506555626122e-05, "loss": 2.8664, "step": 195050 }, { "epoch": 1.722919867888871, "grad_norm": 2.9029061794281006, "learning_rate": 7.363282050554532e-05, "loss": 3.5179, "step": 195100 }, { "epoch": 1.7233614157791552, "grad_norm": 2.5034167766571045, "learning_rate": 7.362057362939445e-05, "loss": 3.0646, "step": 195150 }, { "epoch": 1.7238029636694396, "grad_norm": 
3.19059681892395, "learning_rate": 7.360832492875455e-05, "loss": 2.7585, "step": 195200 }, { "epoch": 1.7242445115597238, "grad_norm": 0.7491962313652039, "learning_rate": 7.359607440457176e-05, "loss": 3.0848, "step": 195250 }, { "epoch": 1.724686059450008, "grad_norm": 1.9276366233825684, "learning_rate": 7.358382205779231e-05, "loss": 3.3159, "step": 195300 }, { "epoch": 1.7251276073402921, "grad_norm": 4.427799224853516, "learning_rate": 7.357156788936257e-05, "loss": 2.8902, "step": 195350 }, { "epoch": 1.7255691552305763, "grad_norm": 6.118712902069092, "learning_rate": 7.35593119002291e-05, "loss": 3.2164, "step": 195400 }, { "epoch": 1.7260107031208605, "grad_norm": 2.9821903705596924, "learning_rate": 7.354705409133858e-05, "loss": 3.344, "step": 195450 }, { "epoch": 1.7264522510111446, "grad_norm": 3.2221412658691406, "learning_rate": 7.353479446363778e-05, "loss": 3.3174, "step": 195500 }, { "epoch": 1.7268937989014288, "grad_norm": 2.3944759368896484, "learning_rate": 7.35225330180737e-05, "loss": 3.2564, "step": 195550 }, { "epoch": 1.7273353467917132, "grad_norm": 1.8751672506332397, "learning_rate": 7.351026975559338e-05, "loss": 3.0801, "step": 195600 }, { "epoch": 1.7277768946819974, "grad_norm": 1.0560152530670166, "learning_rate": 7.349800467714412e-05, "loss": 3.3012, "step": 195650 }, { "epoch": 1.7282184425722815, "grad_norm": 2.2292487621307373, "learning_rate": 7.348573778367323e-05, "loss": 3.2392, "step": 195700 }, { "epoch": 1.7286599904625657, "grad_norm": 2.8452000617980957, "learning_rate": 7.347346907612825e-05, "loss": 3.59, "step": 195750 }, { "epoch": 1.7291015383528499, "grad_norm": 4.416501998901367, "learning_rate": 7.346119855545685e-05, "loss": 3.4565, "step": 195800 }, { "epoch": 1.729543086243134, "grad_norm": 4.022757053375244, "learning_rate": 7.344892622260678e-05, "loss": 3.414, "step": 195850 }, { "epoch": 1.7299846341334182, "grad_norm": 1.0071609020233154, "learning_rate": 7.343665207852603e-05, "loss": 2.9478, 
"step": 195900 }, { "epoch": 1.7304261820237024, "grad_norm": 0.6758058667182922, "learning_rate": 7.342437612416263e-05, "loss": 3.2096, "step": 195950 }, { "epoch": 1.7308677299139865, "grad_norm": 2.8905625343322754, "learning_rate": 7.34120983604648e-05, "loss": 3.07, "step": 196000 }, { "epoch": 1.7313092778042707, "grad_norm": 2.475128173828125, "learning_rate": 7.339981878838092e-05, "loss": 3.4404, "step": 196050 }, { "epoch": 1.7317508256945549, "grad_norm": 0.9189386963844299, "learning_rate": 7.338753740885945e-05, "loss": 2.84, "step": 196100 }, { "epoch": 1.732192373584839, "grad_norm": 1.5804566144943237, "learning_rate": 7.337525422284904e-05, "loss": 3.3711, "step": 196150 }, { "epoch": 1.7326339214751232, "grad_norm": 1.715122938156128, "learning_rate": 7.336296923129844e-05, "loss": 3.3647, "step": 196200 }, { "epoch": 1.7330754693654074, "grad_norm": 1.8856436014175415, "learning_rate": 7.33506824351566e-05, "loss": 3.5442, "step": 196250 }, { "epoch": 1.7335170172556915, "grad_norm": 2.374765396118164, "learning_rate": 7.333839383537254e-05, "loss": 3.2726, "step": 196300 }, { "epoch": 1.7339585651459757, "grad_norm": 2.1815669536590576, "learning_rate": 7.332610343289545e-05, "loss": 3.4051, "step": 196350 }, { "epoch": 1.7344001130362598, "grad_norm": 2.3607351779937744, "learning_rate": 7.331381122867468e-05, "loss": 3.063, "step": 196400 }, { "epoch": 1.734841660926544, "grad_norm": 0.8316501379013062, "learning_rate": 7.330151722365972e-05, "loss": 2.9158, "step": 196450 }, { "epoch": 1.7352832088168282, "grad_norm": 2.3766934871673584, "learning_rate": 7.32892214188001e-05, "loss": 2.7718, "step": 196500 }, { "epoch": 1.7357247567071123, "grad_norm": 2.467963695526123, "learning_rate": 7.327692381504563e-05, "loss": 3.1836, "step": 196550 }, { "epoch": 1.7361663045973965, "grad_norm": 3.188460111618042, "learning_rate": 7.326462441334618e-05, "loss": 3.3165, "step": 196600 }, { "epoch": 1.7366078524876807, "grad_norm": 1.6318292617797852, 
"learning_rate": 7.325256925623005e-05, "loss": 3.717, "step": 196650 }, { "epoch": 1.7370494003779648, "grad_norm": 3.8458199501037598, "learning_rate": 7.324026629740243e-05, "loss": 3.4363, "step": 196700 }, { "epoch": 1.737490948268249, "grad_norm": 4.6386895179748535, "learning_rate": 7.322796154346132e-05, "loss": 3.296, "step": 196750 }, { "epoch": 1.7379324961585334, "grad_norm": 2.404712438583374, "learning_rate": 7.321565499535713e-05, "loss": 3.5477, "step": 196800 }, { "epoch": 1.7383740440488176, "grad_norm": 5.802834510803223, "learning_rate": 7.320334665404046e-05, "loss": 2.9515, "step": 196850 }, { "epoch": 1.7388155919391017, "grad_norm": 1.9599117040634155, "learning_rate": 7.319103652046201e-05, "loss": 3.067, "step": 196900 }, { "epoch": 1.739257139829386, "grad_norm": 3.9484918117523193, "learning_rate": 7.317872459557267e-05, "loss": 2.9862, "step": 196950 }, { "epoch": 1.73969868771967, "grad_norm": 4.071539878845215, "learning_rate": 7.316641088032339e-05, "loss": 2.9783, "step": 197000 }, { "epoch": 1.7401402356099542, "grad_norm": 2.045924663543701, "learning_rate": 7.315409537566532e-05, "loss": 3.0458, "step": 197050 }, { "epoch": 1.7405817835002384, "grad_norm": 3.734661817550659, "learning_rate": 7.314177808254971e-05, "loss": 3.3027, "step": 197100 }, { "epoch": 1.7410233313905226, "grad_norm": 2.9852235317230225, "learning_rate": 7.312945900192798e-05, "loss": 3.608, "step": 197150 }, { "epoch": 1.741464879280807, "grad_norm": 2.508160352706909, "learning_rate": 7.311713813475168e-05, "loss": 3.5748, "step": 197200 }, { "epoch": 1.7419064271710911, "grad_norm": 5.302500247955322, "learning_rate": 7.310481548197249e-05, "loss": 3.5315, "step": 197250 }, { "epoch": 1.7423479750613753, "grad_norm": 1.832080602645874, "learning_rate": 7.309249104454221e-05, "loss": 3.175, "step": 197300 }, { "epoch": 1.7427895229516595, "grad_norm": 4.390317916870117, "learning_rate": 7.308016482341285e-05, "loss": 2.9957, "step": 197350 }, { "epoch": 
1.7432310708419436, "grad_norm": 1.2284395694732666, "learning_rate": 7.306783681953645e-05, "loss": 3.1387, "step": 197400 }, { "epoch": 1.7436726187322278, "grad_norm": 4.071477890014648, "learning_rate": 7.305550703386525e-05, "loss": 3.0247, "step": 197450 }, { "epoch": 1.744114166622512, "grad_norm": 4.2401227951049805, "learning_rate": 7.304317546735167e-05, "loss": 3.0524, "step": 197500 }, { "epoch": 1.7445557145127961, "grad_norm": 1.551368236541748, "learning_rate": 7.303084212094817e-05, "loss": 3.0328, "step": 197550 }, { "epoch": 1.7449972624030803, "grad_norm": 3.074233055114746, "learning_rate": 7.30185069956074e-05, "loss": 3.1914, "step": 197600 }, { "epoch": 1.7454388102933645, "grad_norm": 1.8998547792434692, "learning_rate": 7.300617009228215e-05, "loss": 3.3625, "step": 197650 }, { "epoch": 1.7458803581836486, "grad_norm": 1.0645414590835571, "learning_rate": 7.299383141192539e-05, "loss": 3.0895, "step": 197700 }, { "epoch": 1.7463219060739328, "grad_norm": 1.3525128364562988, "learning_rate": 7.29814909554901e-05, "loss": 3.1057, "step": 197750 }, { "epoch": 1.746763453964217, "grad_norm": 1.9818834066390991, "learning_rate": 7.296914872392952e-05, "loss": 3.3467, "step": 197800 }, { "epoch": 1.7472050018545011, "grad_norm": 2.3736205101013184, "learning_rate": 7.295680471819697e-05, "loss": 3.0871, "step": 197850 }, { "epoch": 1.7476465497447853, "grad_norm": 1.744696021080017, "learning_rate": 7.294445893924593e-05, "loss": 3.3649, "step": 197900 }, { "epoch": 1.7480880976350694, "grad_norm": 2.553920269012451, "learning_rate": 7.293211138803e-05, "loss": 2.8486, "step": 197950 }, { "epoch": 1.7485296455253536, "grad_norm": 2.8582518100738525, "learning_rate": 7.291976206550292e-05, "loss": 2.8678, "step": 198000 }, { "epoch": 1.7485296455253536, "eval_asr_loss": 0.9277813797653791, "eval_loss": 2.883857488632202, "eval_runtime": 20.4712, "eval_samples_per_second": 37.516, "eval_steps_per_second": 9.379, "eval_tts_loss": 5.936648276907347, 
"step": 198000 }, { "epoch": 1.7489711934156378, "grad_norm": 2.297837495803833, "learning_rate": 7.290741097261856e-05, "loss": 3.0111, "step": 198050 }, { "epoch": 1.749412741305922, "grad_norm": 3.550356388092041, "learning_rate": 7.289505811033099e-05, "loss": 3.5257, "step": 198100 }, { "epoch": 1.7498542891962061, "grad_norm": 4.377917289733887, "learning_rate": 7.288270347959429e-05, "loss": 3.2884, "step": 198150 }, { "epoch": 1.7502958370864903, "grad_norm": 1.634263038635254, "learning_rate": 7.28703470813628e-05, "loss": 2.6872, "step": 198200 }, { "epoch": 1.7507373849767744, "grad_norm": 4.123717308044434, "learning_rate": 7.285798891659092e-05, "loss": 3.2004, "step": 198250 }, { "epoch": 1.7511789328670586, "grad_norm": 1.9348206520080566, "learning_rate": 7.284562898623325e-05, "loss": 3.2221, "step": 198300 }, { "epoch": 1.7516204807573428, "grad_norm": 3.293262004852295, "learning_rate": 7.283326729124446e-05, "loss": 3.2032, "step": 198350 }, { "epoch": 1.7520620286476272, "grad_norm": 2.118044376373291, "learning_rate": 7.282090383257937e-05, "loss": 3.2974, "step": 198400 }, { "epoch": 1.7525035765379113, "grad_norm": 1.9033870697021484, "learning_rate": 7.280853861119298e-05, "loss": 3.4477, "step": 198450 }, { "epoch": 1.7529451244281955, "grad_norm": 3.468416452407837, "learning_rate": 7.279617162804042e-05, "loss": 2.9804, "step": 198500 }, { "epoch": 1.7533866723184797, "grad_norm": 3.426452159881592, "learning_rate": 7.278380288407689e-05, "loss": 3.0201, "step": 198550 }, { "epoch": 1.7538282202087638, "grad_norm": 4.912716865539551, "learning_rate": 7.277143238025779e-05, "loss": 3.0869, "step": 198600 }, { "epoch": 1.754269768099048, "grad_norm": 1.0470197200775146, "learning_rate": 7.275906011753863e-05, "loss": 3.5107, "step": 198650 }, { "epoch": 1.7547113159893322, "grad_norm": 2.836777687072754, "learning_rate": 7.274668609687507e-05, "loss": 3.0968, "step": 198700 }, { "epoch": 1.7551528638796163, "grad_norm": 3.8205599784851074, 
"learning_rate": 7.273431031922291e-05, "loss": 3.0864, "step": 198750 }, { "epoch": 1.7555944117699007, "grad_norm": 5.9745893478393555, "learning_rate": 7.272193278553803e-05, "loss": 3.2796, "step": 198800 }, { "epoch": 1.7560359596601849, "grad_norm": 1.6742478609085083, "learning_rate": 7.270955349677654e-05, "loss": 3.5679, "step": 198850 }, { "epoch": 1.756477507550469, "grad_norm": 1.8220934867858887, "learning_rate": 7.269717245389461e-05, "loss": 2.9193, "step": 198900 }, { "epoch": 1.7569190554407532, "grad_norm": 0.8500947952270508, "learning_rate": 7.268478965784857e-05, "loss": 2.9982, "step": 198950 }, { "epoch": 1.7573606033310374, "grad_norm": 3.3522162437438965, "learning_rate": 7.267240510959489e-05, "loss": 3.1133, "step": 199000 }, { "epoch": 1.7578021512213216, "grad_norm": 1.2531402111053467, "learning_rate": 7.266001881009018e-05, "loss": 2.583, "step": 199050 }, { "epoch": 1.7582436991116057, "grad_norm": 3.770021677017212, "learning_rate": 7.264763076029116e-05, "loss": 3.4063, "step": 199100 }, { "epoch": 1.7586852470018899, "grad_norm": 2.973428249359131, "learning_rate": 7.263524096115471e-05, "loss": 3.1737, "step": 199150 }, { "epoch": 1.759126794892174, "grad_norm": 1.8131953477859497, "learning_rate": 7.262284941363784e-05, "loss": 3.3857, "step": 199200 }, { "epoch": 1.7595683427824582, "grad_norm": 2.9858899116516113, "learning_rate": 7.261045611869767e-05, "loss": 3.3893, "step": 199250 }, { "epoch": 1.7600098906727424, "grad_norm": 3.661966323852539, "learning_rate": 7.259806107729155e-05, "loss": 3.2071, "step": 199300 }, { "epoch": 1.7604514385630265, "grad_norm": 3.4197046756744385, "learning_rate": 7.258566429037679e-05, "loss": 3.0726, "step": 199350 }, { "epoch": 1.7608929864533107, "grad_norm": 3.3999383449554443, "learning_rate": 7.2573265758911e-05, "loss": 3.6426, "step": 199400 }, { "epoch": 1.7613345343435949, "grad_norm": 1.2242519855499268, "learning_rate": 7.256086548385183e-05, "loss": 2.9793, "step": 199450 }, { 
"epoch": 1.761776082233879, "grad_norm": 5.0131964683532715, "learning_rate": 7.254846346615713e-05, "loss": 3.4929, "step": 199500 }, { "epoch": 1.7622176301241632, "grad_norm": 3.7067580223083496, "learning_rate": 7.253605970678483e-05, "loss": 3.1712, "step": 199550 }, { "epoch": 1.7626591780144474, "grad_norm": 1.0138520002365112, "learning_rate": 7.2523654206693e-05, "loss": 3.5418, "step": 199600 }, { "epoch": 1.7631007259047315, "grad_norm": 0.7684496641159058, "learning_rate": 7.251124696683989e-05, "loss": 2.7712, "step": 199650 }, { "epoch": 1.7635422737950157, "grad_norm": 3.878993272781372, "learning_rate": 7.249883798818386e-05, "loss": 3.6168, "step": 199700 }, { "epoch": 1.7639838216852999, "grad_norm": 3.2014262676239014, "learning_rate": 7.248642727168335e-05, "loss": 3.2813, "step": 199750 }, { "epoch": 1.764425369575584, "grad_norm": 3.0954771041870117, "learning_rate": 7.247401481829703e-05, "loss": 3.6834, "step": 199800 }, { "epoch": 1.7648669174658682, "grad_norm": 1.132131814956665, "learning_rate": 7.246160062898364e-05, "loss": 2.5174, "step": 199850 }, { "epoch": 1.7653084653561524, "grad_norm": 2.3023476600646973, "learning_rate": 7.244918470470206e-05, "loss": 3.2556, "step": 199900 }, { "epoch": 1.7657500132464365, "grad_norm": 4.4456586837768555, "learning_rate": 7.243676704641134e-05, "loss": 3.2855, "step": 199950 }, { "epoch": 1.766191561136721, "grad_norm": 3.184326171875, "learning_rate": 7.242434765507061e-05, "loss": 3.531, "step": 200000 }, { "epoch": 1.766633109027005, "grad_norm": 1.8845802545547485, "learning_rate": 7.241192653163918e-05, "loss": 2.9489, "step": 200050 }, { "epoch": 1.7670746569172893, "grad_norm": 2.8838372230529785, "learning_rate": 7.239950367707647e-05, "loss": 2.9885, "step": 200100 }, { "epoch": 1.7675162048075734, "grad_norm": 0.9097310304641724, "learning_rate": 7.238707909234206e-05, "loss": 3.262, "step": 200150 }, { "epoch": 1.7679577526978576, "grad_norm": 2.829453229904175, "learning_rate": 
7.237465277839561e-05, "loss": 3.2191, "step": 200200 }, { "epoch": 1.7683993005881418, "grad_norm": 1.0789093971252441, "learning_rate": 7.236222473619697e-05, "loss": 3.1551, "step": 200250 }, { "epoch": 1.768840848478426, "grad_norm": 1.0863782167434692, "learning_rate": 7.234979496670609e-05, "loss": 2.9866, "step": 200300 }, { "epoch": 1.76928239636871, "grad_norm": 1.4690102338790894, "learning_rate": 7.233736347088307e-05, "loss": 2.9813, "step": 200350 }, { "epoch": 1.7697239442589945, "grad_norm": 2.871446132659912, "learning_rate": 7.232493024968815e-05, "loss": 3.1252, "step": 200400 }, { "epoch": 1.7701654921492787, "grad_norm": 1.6246694326400757, "learning_rate": 7.231249530408168e-05, "loss": 3.0882, "step": 200450 }, { "epoch": 1.7706070400395628, "grad_norm": 6.12792444229126, "learning_rate": 7.230005863502413e-05, "loss": 2.9357, "step": 200500 }, { "epoch": 1.771048587929847, "grad_norm": 2.492288589477539, "learning_rate": 7.228762024347616e-05, "loss": 3.2043, "step": 200550 }, { "epoch": 1.7714901358201312, "grad_norm": 1.2008299827575684, "learning_rate": 7.227518013039851e-05, "loss": 2.7181, "step": 200600 }, { "epoch": 1.7719316837104153, "grad_norm": 3.010150909423828, "learning_rate": 7.226273829675211e-05, "loss": 3.1411, "step": 200650 }, { "epoch": 1.7723732316006995, "grad_norm": 2.3677845001220703, "learning_rate": 7.225029474349794e-05, "loss": 3.4423, "step": 200700 }, { "epoch": 1.7728147794909836, "grad_norm": 4.701264381408691, "learning_rate": 7.223784947159717e-05, "loss": 3.2663, "step": 200750 }, { "epoch": 1.7732563273812678, "grad_norm": 0.874535322189331, "learning_rate": 7.222540248201112e-05, "loss": 3.0042, "step": 200800 }, { "epoch": 1.773697875271552, "grad_norm": 2.3065900802612305, "learning_rate": 7.221295377570119e-05, "loss": 3.1783, "step": 200850 }, { "epoch": 1.7741394231618361, "grad_norm": 3.3488705158233643, "learning_rate": 7.220050335362894e-05, "loss": 3.3362, "step": 200900 }, { "epoch": 
1.7745809710521203, "grad_norm": 1.8239176273345947, "learning_rate": 7.218805121675605e-05, "loss": 3.3862, "step": 200950 }, { "epoch": 1.7750225189424045, "grad_norm": 2.1905157566070557, "learning_rate": 7.217559736604435e-05, "loss": 3.5234, "step": 201000 }, { "epoch": 1.7750225189424045, "eval_asr_loss": 0.910745237708085, "eval_loss": 2.8749611377716064, "eval_runtime": 20.3917, "eval_samples_per_second": 37.662, "eval_steps_per_second": 9.416, "eval_tts_loss": 5.940568454043894, "step": 201000 }, { "epoch": 1.7754640668326886, "grad_norm": 1.463396430015564, "learning_rate": 7.216314180245581e-05, "loss": 3.2108, "step": 201050 }, { "epoch": 1.7759056147229728, "grad_norm": 2.2169008255004883, "learning_rate": 7.21506845269525e-05, "loss": 3.3798, "step": 201100 }, { "epoch": 1.776347162613257, "grad_norm": 2.9501075744628906, "learning_rate": 7.213822554049664e-05, "loss": 3.1308, "step": 201150 }, { "epoch": 1.7767887105035411, "grad_norm": 2.21448016166687, "learning_rate": 7.212576484405058e-05, "loss": 3.1955, "step": 201200 }, { "epoch": 1.7772302583938253, "grad_norm": 2.7748055458068848, "learning_rate": 7.211330243857682e-05, "loss": 3.0091, "step": 201250 }, { "epoch": 1.7776718062841095, "grad_norm": 2.3246281147003174, "learning_rate": 7.210083832503796e-05, "loss": 3.2605, "step": 201300 }, { "epoch": 1.7781133541743936, "grad_norm": 2.713412046432495, "learning_rate": 7.208837250439675e-05, "loss": 3.0552, "step": 201350 }, { "epoch": 1.7785549020646778, "grad_norm": 1.687602162361145, "learning_rate": 7.207590497761604e-05, "loss": 3.2309, "step": 201400 }, { "epoch": 1.778996449954962, "grad_norm": 6.353885650634766, "learning_rate": 7.20634357456589e-05, "loss": 3.3475, "step": 201450 }, { "epoch": 1.7794379978452461, "grad_norm": 2.1135149002075195, "learning_rate": 7.205096480948844e-05, "loss": 2.8696, "step": 201500 }, { "epoch": 1.7798795457355303, "grad_norm": 2.8203511238098145, "learning_rate": 7.203849217006792e-05, "loss": 
2.8179, "step": 201550 }, { "epoch": 1.7803210936258147, "grad_norm": 3.2417795658111572, "learning_rate": 7.202601782836075e-05, "loss": 3.2816, "step": 201600 }, { "epoch": 1.7807626415160989, "grad_norm": 3.100700855255127, "learning_rate": 7.201354178533051e-05, "loss": 3.1216, "step": 201650 }, { "epoch": 1.781204189406383, "grad_norm": 3.2376022338867188, "learning_rate": 7.200106404194082e-05, "loss": 3.1, "step": 201700 }, { "epoch": 1.7816457372966672, "grad_norm": 2.8501505851745605, "learning_rate": 7.198858459915549e-05, "loss": 2.8905, "step": 201750 }, { "epoch": 1.7820872851869514, "grad_norm": 3.309192180633545, "learning_rate": 7.197610345793848e-05, "loss": 2.9565, "step": 201800 }, { "epoch": 1.7825288330772355, "grad_norm": 2.582115888595581, "learning_rate": 7.196362061925381e-05, "loss": 3.7028, "step": 201850 }, { "epoch": 1.7829703809675197, "grad_norm": 2.774749517440796, "learning_rate": 7.195113608406569e-05, "loss": 3.2302, "step": 201900 }, { "epoch": 1.7834119288578039, "grad_norm": 1.1029777526855469, "learning_rate": 7.193864985333847e-05, "loss": 3.3933, "step": 201950 }, { "epoch": 1.7838534767480883, "grad_norm": 3.4153521060943604, "learning_rate": 7.192616192803656e-05, "loss": 3.4729, "step": 202000 }, { "epoch": 1.7842950246383724, "grad_norm": 4.102583885192871, "learning_rate": 7.191367230912459e-05, "loss": 2.8795, "step": 202050 }, { "epoch": 1.7847365725286566, "grad_norm": 0.49508291482925415, "learning_rate": 7.190118099756724e-05, "loss": 3.681, "step": 202100 }, { "epoch": 1.7851781204189408, "grad_norm": 1.6070566177368164, "learning_rate": 7.188893787096637e-05, "loss": 3.4495, "step": 202150 }, { "epoch": 1.785619668309225, "grad_norm": 1.8097330331802368, "learning_rate": 7.187644321081782e-05, "loss": 3.138, "step": 202200 }, { "epoch": 1.786061216199509, "grad_norm": 1.518096923828125, "learning_rate": 7.186394686089954e-05, "loss": 2.7297, "step": 202250 }, { "epoch": 1.7865027640897932, "grad_norm": 
3.6704065799713135, "learning_rate": 7.185144882217678e-05, "loss": 2.875, "step": 202300 }, { "epoch": 1.7869443119800774, "grad_norm": 1.533138632774353, "learning_rate": 7.18389490956149e-05, "loss": 2.9326, "step": 202350 }, { "epoch": 1.7873858598703616, "grad_norm": 2.0090863704681396, "learning_rate": 7.182644768217936e-05, "loss": 3.4076, "step": 202400 }, { "epoch": 1.7878274077606457, "grad_norm": 3.815380573272705, "learning_rate": 7.181394458283583e-05, "loss": 3.3829, "step": 202450 }, { "epoch": 1.78826895565093, "grad_norm": 2.4792747497558594, "learning_rate": 7.180143979855008e-05, "loss": 3.314, "step": 202500 }, { "epoch": 1.788710503541214, "grad_norm": 1.389562964439392, "learning_rate": 7.178893333028794e-05, "loss": 3.0, "step": 202550 }, { "epoch": 1.7891520514314982, "grad_norm": 1.2650171518325806, "learning_rate": 7.177642517901549e-05, "loss": 3.5354, "step": 202600 }, { "epoch": 1.7895935993217824, "grad_norm": 3.26472806930542, "learning_rate": 7.176391534569885e-05, "loss": 3.3161, "step": 202650 }, { "epoch": 1.7900351472120666, "grad_norm": 2.080465793609619, "learning_rate": 7.17514038313043e-05, "loss": 2.8847, "step": 202700 }, { "epoch": 1.7904766951023507, "grad_norm": 2.661543369293213, "learning_rate": 7.173889063679826e-05, "loss": 3.2848, "step": 202750 }, { "epoch": 1.790918242992635, "grad_norm": 1.5421067476272583, "learning_rate": 7.172637576314724e-05, "loss": 3.3914, "step": 202800 }, { "epoch": 1.791359790882919, "grad_norm": 4.784469127655029, "learning_rate": 7.171385921131793e-05, "loss": 3.0531, "step": 202850 }, { "epoch": 1.7918013387732032, "grad_norm": 5.333433151245117, "learning_rate": 7.170134098227713e-05, "loss": 3.3758, "step": 202900 }, { "epoch": 1.7922428866634874, "grad_norm": 1.1900759935379028, "learning_rate": 7.168882107699178e-05, "loss": 3.2778, "step": 202950 }, { "epoch": 1.7926844345537716, "grad_norm": 4.2666096687316895, "learning_rate": 7.167629949642889e-05, "loss": 3.0026, "step": 
203000 }, { "epoch": 1.7931259824440557, "grad_norm": 2.9323208332061768, "learning_rate": 7.166377624155567e-05, "loss": 3.0496, "step": 203050 }, { "epoch": 1.79356753033434, "grad_norm": 4.785465717315674, "learning_rate": 7.165125131333946e-05, "loss": 3.0047, "step": 203100 }, { "epoch": 1.794009078224624, "grad_norm": 2.6500496864318848, "learning_rate": 7.163872471274768e-05, "loss": 2.8693, "step": 203150 }, { "epoch": 1.7944506261149085, "grad_norm": 4.793989181518555, "learning_rate": 7.16261964407479e-05, "loss": 3.3363, "step": 203200 }, { "epoch": 1.7948921740051926, "grad_norm": 4.713131904602051, "learning_rate": 7.161366649830783e-05, "loss": 3.1709, "step": 203250 }, { "epoch": 1.7953337218954768, "grad_norm": 1.1408799886703491, "learning_rate": 7.16011348863953e-05, "loss": 3.091, "step": 203300 }, { "epoch": 1.795775269785761, "grad_norm": 4.1913676261901855, "learning_rate": 7.158860160597828e-05, "loss": 2.9463, "step": 203350 }, { "epoch": 1.7962168176760451, "grad_norm": 3.7222936153411865, "learning_rate": 7.157606665802484e-05, "loss": 3.5081, "step": 203400 }, { "epoch": 1.7966583655663293, "grad_norm": 2.148895740509033, "learning_rate": 7.156353004350321e-05, "loss": 2.9903, "step": 203450 }, { "epoch": 1.7970999134566135, "grad_norm": 1.5505282878875732, "learning_rate": 7.155099176338176e-05, "loss": 3.1496, "step": 203500 }, { "epoch": 1.7975414613468979, "grad_norm": 3.622462511062622, "learning_rate": 7.153845181862893e-05, "loss": 2.7559, "step": 203550 }, { "epoch": 1.797983009237182, "grad_norm": 4.700122356414795, "learning_rate": 7.152591021021332e-05, "loss": 3.12, "step": 203600 }, { "epoch": 1.7984245571274662, "grad_norm": 1.3534070253372192, "learning_rate": 7.15133669391037e-05, "loss": 2.584, "step": 203650 }, { "epoch": 1.7988661050177503, "grad_norm": 2.837641954421997, "learning_rate": 7.150082200626891e-05, "loss": 3.6518, "step": 203700 }, { "epoch": 1.7993076529080345, "grad_norm": 3.3624727725982666, 
"learning_rate": 7.148827541267794e-05, "loss": 3.2949, "step": 203750 }, { "epoch": 1.7997492007983187, "grad_norm": 2.1359448432922363, "learning_rate": 7.147572715929991e-05, "loss": 3.3417, "step": 203800 }, { "epoch": 1.8001907486886028, "grad_norm": 2.3549392223358154, "learning_rate": 7.146317724710407e-05, "loss": 2.98, "step": 203850 }, { "epoch": 1.800632296578887, "grad_norm": 2.2345235347747803, "learning_rate": 7.145062567705979e-05, "loss": 3.5184, "step": 203900 }, { "epoch": 1.8010738444691712, "grad_norm": 2.326660394668579, "learning_rate": 7.143807245013656e-05, "loss": 2.8502, "step": 203950 }, { "epoch": 1.8015153923594553, "grad_norm": 2.0236425399780273, "learning_rate": 7.142551756730403e-05, "loss": 3.163, "step": 204000 }, { "epoch": 1.8015153923594553, "eval_asr_loss": 0.9254957463829835, "eval_loss": 2.87661075592041, "eval_runtime": 20.4661, "eval_samples_per_second": 37.525, "eval_steps_per_second": 9.381, "eval_tts_loss": 5.962671826088398, "step": 204000 }, { "epoch": 1.8019569402497395, "grad_norm": 1.7293661832809448, "learning_rate": 7.141296102953195e-05, "loss": 2.9224, "step": 204050 }, { "epoch": 1.8023984881400237, "grad_norm": 1.66663658618927, "learning_rate": 7.140040283779021e-05, "loss": 3.1893, "step": 204100 }, { "epoch": 1.8028400360303078, "grad_norm": 2.202199935913086, "learning_rate": 7.138784299304882e-05, "loss": 3.2132, "step": 204150 }, { "epoch": 1.803281583920592, "grad_norm": 2.037201404571533, "learning_rate": 7.137528149627792e-05, "loss": 3.3609, "step": 204200 }, { "epoch": 1.8037231318108762, "grad_norm": 2.08586049079895, "learning_rate": 7.136271834844778e-05, "loss": 3.123, "step": 204250 }, { "epoch": 1.8041646797011603, "grad_norm": 2.966116189956665, "learning_rate": 7.135040486265179e-05, "loss": 2.9609, "step": 204300 }, { "epoch": 1.8046062275914445, "grad_norm": 1.4813175201416016, "learning_rate": 7.133783844858734e-05, "loss": 3.1627, "step": 204350 }, { "epoch": 1.8050477754817287, 
"grad_norm": 1.58802330493927, "learning_rate": 7.132527038635583e-05, "loss": 3.3023, "step": 204400 }, { "epoch": 1.8054893233720128, "grad_norm": 1.9538745880126953, "learning_rate": 7.1312700676928e-05, "loss": 3.1937, "step": 204450 }, { "epoch": 1.805930871262297, "grad_norm": 2.2432916164398193, "learning_rate": 7.130012932127477e-05, "loss": 3.3247, "step": 204500 }, { "epoch": 1.8063724191525812, "grad_norm": 3.8045809268951416, "learning_rate": 7.128755632036717e-05, "loss": 3.459, "step": 204550 }, { "epoch": 1.8068139670428653, "grad_norm": 4.374520301818848, "learning_rate": 7.127498167517636e-05, "loss": 3.141, "step": 204600 }, { "epoch": 1.8072555149331495, "grad_norm": 2.797487735748291, "learning_rate": 7.126240538667361e-05, "loss": 3.7096, "step": 204650 }, { "epoch": 1.8076970628234337, "grad_norm": 1.1959338188171387, "learning_rate": 7.124982745583037e-05, "loss": 3.4067, "step": 204700 }, { "epoch": 1.808138610713718, "grad_norm": 4.910750389099121, "learning_rate": 7.123724788361814e-05, "loss": 3.1069, "step": 204750 }, { "epoch": 1.8085801586040022, "grad_norm": 1.9608979225158691, "learning_rate": 7.12246666710086e-05, "loss": 3.6457, "step": 204800 }, { "epoch": 1.8090217064942864, "grad_norm": 4.217527866363525, "learning_rate": 7.121208381897353e-05, "loss": 3.1097, "step": 204850 }, { "epoch": 1.8094632543845706, "grad_norm": 1.2531399726867676, "learning_rate": 7.119949932848486e-05, "loss": 3.1505, "step": 204900 }, { "epoch": 1.8099048022748547, "grad_norm": 2.24678897857666, "learning_rate": 7.118691320051464e-05, "loss": 3.1959, "step": 204950 }, { "epoch": 1.810346350165139, "grad_norm": 1.1606642007827759, "learning_rate": 7.117432543603503e-05, "loss": 3.3106, "step": 205000 }, { "epoch": 1.810787898055423, "grad_norm": 6.087655067443848, "learning_rate": 7.116173603601834e-05, "loss": 3.097, "step": 205050 }, { "epoch": 1.8112294459457072, "grad_norm": 2.541350841522217, "learning_rate": 7.114914500143698e-05, "loss": 
3.3576, "step": 205100 }, { "epoch": 1.8116709938359916, "grad_norm": 3.244662284851074, "learning_rate": 7.113655233326351e-05, "loss": 3.3987, "step": 205150 }, { "epoch": 1.8121125417262758, "grad_norm": 1.9999686479568481, "learning_rate": 7.11239580324706e-05, "loss": 3.0979, "step": 205200 }, { "epoch": 1.81255408961656, "grad_norm": 2.8434536457061768, "learning_rate": 7.111136210003106e-05, "loss": 3.2806, "step": 205250 }, { "epoch": 1.8129956375068441, "grad_norm": 1.383022665977478, "learning_rate": 7.109876453691781e-05, "loss": 2.9514, "step": 205300 }, { "epoch": 1.8134371853971283, "grad_norm": 4.14407205581665, "learning_rate": 7.108616534410394e-05, "loss": 3.2434, "step": 205350 }, { "epoch": 1.8138787332874124, "grad_norm": 4.073207855224609, "learning_rate": 7.107356452256258e-05, "loss": 3.2738, "step": 205400 }, { "epoch": 1.8143202811776966, "grad_norm": 2.9110472202301025, "learning_rate": 7.106096207326707e-05, "loss": 3.0951, "step": 205450 }, { "epoch": 1.8147618290679808, "grad_norm": 4.486078262329102, "learning_rate": 7.104835799719083e-05, "loss": 3.1462, "step": 205500 }, { "epoch": 1.815203376958265, "grad_norm": 2.024273157119751, "learning_rate": 7.103575229530742e-05, "loss": 3.2425, "step": 205550 }, { "epoch": 1.8156449248485491, "grad_norm": 1.897672414779663, "learning_rate": 7.102314496859052e-05, "loss": 3.2251, "step": 205600 }, { "epoch": 1.8160864727388333, "grad_norm": 3.416689395904541, "learning_rate": 7.101053601801392e-05, "loss": 2.9973, "step": 205650 }, { "epoch": 1.8165280206291174, "grad_norm": 1.4610952138900757, "learning_rate": 7.099792544455161e-05, "loss": 3.0895, "step": 205700 }, { "epoch": 1.8169695685194016, "grad_norm": 3.8125312328338623, "learning_rate": 7.09853132491776e-05, "loss": 2.9728, "step": 205750 }, { "epoch": 1.8174111164096858, "grad_norm": 2.8992254734039307, "learning_rate": 7.09726994328661e-05, "loss": 3.2576, "step": 205800 }, { "epoch": 1.81785266429997, "grad_norm": 
3.9919283390045166, "learning_rate": 7.09600839965914e-05, "loss": 3.0434, "step": 205850 }, { "epoch": 1.8182942121902541, "grad_norm": 1.9868943691253662, "learning_rate": 7.094746694132795e-05, "loss": 3.1502, "step": 205900 }, { "epoch": 1.8187357600805383, "grad_norm": 3.0747931003570557, "learning_rate": 7.093484826805032e-05, "loss": 2.5582, "step": 205950 }, { "epoch": 1.8191773079708224, "grad_norm": 3.5087995529174805, "learning_rate": 7.092222797773318e-05, "loss": 3.1024, "step": 206000 }, { "epoch": 1.8196188558611066, "grad_norm": 2.8471686840057373, "learning_rate": 7.090960607135134e-05, "loss": 3.008, "step": 206050 }, { "epoch": 1.8200604037513908, "grad_norm": 3.2857754230499268, "learning_rate": 7.089698254987975e-05, "loss": 3.1496, "step": 206100 }, { "epoch": 1.820501951641675, "grad_norm": 3.396155834197998, "learning_rate": 7.088435741429344e-05, "loss": 2.8294, "step": 206150 }, { "epoch": 1.820943499531959, "grad_norm": 3.1792287826538086, "learning_rate": 7.087173066556762e-05, "loss": 3.3667, "step": 206200 }, { "epoch": 1.8213850474222433, "grad_norm": 2.4418413639068604, "learning_rate": 7.085910230467761e-05, "loss": 2.8149, "step": 206250 }, { "epoch": 1.8218265953125274, "grad_norm": 1.8641915321350098, "learning_rate": 7.08464723325988e-05, "loss": 2.9135, "step": 206300 }, { "epoch": 1.8222681432028118, "grad_norm": 4.3222880363464355, "learning_rate": 7.083384075030681e-05, "loss": 3.2363, "step": 206350 }, { "epoch": 1.822709691093096, "grad_norm": 2.6220529079437256, "learning_rate": 7.082120755877726e-05, "loss": 3.1948, "step": 206400 }, { "epoch": 1.8231512389833802, "grad_norm": 3.988168954849243, "learning_rate": 7.080857275898598e-05, "loss": 3.438, "step": 206450 }, { "epoch": 1.8235927868736643, "grad_norm": 2.376438856124878, "learning_rate": 7.079593635190892e-05, "loss": 3.5711, "step": 206500 }, { "epoch": 1.8240343347639485, "grad_norm": 5.018946170806885, "learning_rate": 7.078329833852212e-05, "loss": 2.6372, 
"step": 206550 }, { "epoch": 1.8244758826542327, "grad_norm": 1.6604958772659302, "learning_rate": 7.077065871980177e-05, "loss": 3.1432, "step": 206600 }, { "epoch": 1.8249174305445168, "grad_norm": 1.9766267538070679, "learning_rate": 7.075801749672415e-05, "loss": 3.1183, "step": 206650 }, { "epoch": 1.825358978434801, "grad_norm": 2.3561177253723145, "learning_rate": 7.074537467026569e-05, "loss": 3.378, "step": 206700 }, { "epoch": 1.8258005263250854, "grad_norm": 0.6396112442016602, "learning_rate": 7.073273024140298e-05, "loss": 3.4214, "step": 206750 }, { "epoch": 1.8262420742153695, "grad_norm": 3.366499662399292, "learning_rate": 7.072008421111265e-05, "loss": 2.9011, "step": 206800 }, { "epoch": 1.8266836221056537, "grad_norm": 2.9011270999908447, "learning_rate": 7.070743658037152e-05, "loss": 3.3191, "step": 206850 }, { "epoch": 1.8271251699959379, "grad_norm": 2.483954906463623, "learning_rate": 7.069504035042934e-05, "loss": 2.9523, "step": 206900 }, { "epoch": 1.827566717886222, "grad_norm": 3.9285833835601807, "learning_rate": 7.068238955367785e-05, "loss": 2.9662, "step": 206950 }, { "epoch": 1.8280082657765062, "grad_norm": 2.934602975845337, "learning_rate": 7.066973715938715e-05, "loss": 3.7216, "step": 207000 }, { "epoch": 1.8280082657765062, "eval_asr_loss": 0.9368323913638121, "eval_loss": 2.8737800121307373, "eval_runtime": 20.803, "eval_samples_per_second": 36.918, "eval_steps_per_second": 9.229, "eval_tts_loss": 5.962727203036098, "step": 207000 }, { "epoch": 1.8284498136667904, "grad_norm": 3.736560821533203, "learning_rate": 7.065708316853454e-05, "loss": 2.9009, "step": 207050 }, { "epoch": 1.8288913615570745, "grad_norm": 1.8733489513397217, "learning_rate": 7.06444275820974e-05, "loss": 3.076, "step": 207100 }, { "epoch": 1.8293329094473587, "grad_norm": 4.141226768493652, "learning_rate": 7.06317704010533e-05, "loss": 3.1736, "step": 207150 }, { "epoch": 1.8297744573376429, "grad_norm": 2.7284300327301025, "learning_rate": 
7.06191116263799e-05, "loss": 2.8449, "step": 207200 }, { "epoch": 1.830216005227927, "grad_norm": 1.1890994310379028, "learning_rate": 7.060645125905497e-05, "loss": 3.0688, "step": 207250 }, { "epoch": 1.8306575531182112, "grad_norm": 2.6329152584075928, "learning_rate": 7.05937893000564e-05, "loss": 3.0454, "step": 207300 }, { "epoch": 1.8310991010084954, "grad_norm": 4.558962821960449, "learning_rate": 7.058112575036224e-05, "loss": 3.0297, "step": 207350 }, { "epoch": 1.8315406488987795, "grad_norm": 2.9594004154205322, "learning_rate": 7.056846061095063e-05, "loss": 3.09, "step": 207400 }, { "epoch": 1.8319821967890637, "grad_norm": 2.892536163330078, "learning_rate": 7.055579388279988e-05, "loss": 2.9773, "step": 207450 }, { "epoch": 1.8324237446793479, "grad_norm": 2.9856982231140137, "learning_rate": 7.054312556688835e-05, "loss": 3.4105, "step": 207500 }, { "epoch": 1.832865292569632, "grad_norm": 3.486680746078491, "learning_rate": 7.053045566419457e-05, "loss": 3.513, "step": 207550 }, { "epoch": 1.8333068404599162, "grad_norm": 1.4230419397354126, "learning_rate": 7.051778417569719e-05, "loss": 3.2426, "step": 207600 }, { "epoch": 1.8337483883502004, "grad_norm": 1.3702068328857422, "learning_rate": 7.050511110237497e-05, "loss": 3.2997, "step": 207650 }, { "epoch": 1.8341899362404845, "grad_norm": 3.2727692127227783, "learning_rate": 7.049243644520677e-05, "loss": 3.0542, "step": 207700 }, { "epoch": 1.8346314841307687, "grad_norm": 2.400622606277466, "learning_rate": 7.047976020517164e-05, "loss": 2.4248, "step": 207750 }, { "epoch": 1.8350730320210529, "grad_norm": 2.2951934337615967, "learning_rate": 7.04670823832487e-05, "loss": 3.0014, "step": 207800 }, { "epoch": 1.835514579911337, "grad_norm": 4.202934741973877, "learning_rate": 7.045440298041721e-05, "loss": 3.0171, "step": 207850 }, { "epoch": 1.8359561278016212, "grad_norm": 5.109984874725342, "learning_rate": 7.044172199765652e-05, "loss": 3.0452, "step": 207900 }, { "epoch": 
1.8363976756919056, "grad_norm": 0.8654121160507202, "learning_rate": 7.042903943594617e-05, "loss": 3.245, "step": 207950 }, { "epoch": 1.8368392235821898, "grad_norm": 2.106384754180908, "learning_rate": 7.041635529626574e-05, "loss": 3.1378, "step": 208000 }, { "epoch": 1.837280771472474, "grad_norm": 2.5134472846984863, "learning_rate": 7.040366957959499e-05, "loss": 3.4982, "step": 208050 }, { "epoch": 1.837722319362758, "grad_norm": 3.753645181655884, "learning_rate": 7.039123604820596e-05, "loss": 3.2786, "step": 208100 }, { "epoch": 1.8381638672530423, "grad_norm": 2.0565855503082275, "learning_rate": 7.037854721198529e-05, "loss": 2.9289, "step": 208150 }, { "epoch": 1.8386054151433264, "grad_norm": 2.073881149291992, "learning_rate": 7.036585680169465e-05, "loss": 3.1887, "step": 208200 }, { "epoch": 1.8390469630336106, "grad_norm": 1.2692102193832397, "learning_rate": 7.035316481831427e-05, "loss": 2.9537, "step": 208250 }, { "epoch": 1.8394885109238948, "grad_norm": 1.5824757814407349, "learning_rate": 7.034047126282451e-05, "loss": 2.8978, "step": 208300 }, { "epoch": 1.8399300588141791, "grad_norm": 4.089334964752197, "learning_rate": 7.032777613620582e-05, "loss": 3.3272, "step": 208350 }, { "epoch": 1.8403716067044633, "grad_norm": 2.8007025718688965, "learning_rate": 7.031507943943879e-05, "loss": 3.1612, "step": 208400 }, { "epoch": 1.8408131545947475, "grad_norm": 1.6686805486679077, "learning_rate": 7.030238117350414e-05, "loss": 2.7393, "step": 208450 }, { "epoch": 1.8412547024850316, "grad_norm": 3.463226079940796, "learning_rate": 7.028968133938269e-05, "loss": 2.7314, "step": 208500 }, { "epoch": 1.8416962503753158, "grad_norm": 4.528909683227539, "learning_rate": 7.027697993805544e-05, "loss": 3.1757, "step": 208550 }, { "epoch": 1.8421377982656, "grad_norm": 2.405137538909912, "learning_rate": 7.026427697050342e-05, "loss": 3.126, "step": 208600 }, { "epoch": 1.8425793461558841, "grad_norm": 2.5665478706359863, "learning_rate": 
7.025157243770782e-05, "loss": 3.4373, "step": 208650 }, { "epoch": 1.8430208940461683, "grad_norm": 1.1757197380065918, "learning_rate": 7.023886634065e-05, "loss": 3.4072, "step": 208700 }, { "epoch": 1.8434624419364525, "grad_norm": 1.8347642421722412, "learning_rate": 7.022615868031138e-05, "loss": 2.8205, "step": 208750 }, { "epoch": 1.8439039898267366, "grad_norm": 3.2277307510375977, "learning_rate": 7.02134494576735e-05, "loss": 2.8286, "step": 208800 }, { "epoch": 1.8443455377170208, "grad_norm": 5.538561820983887, "learning_rate": 7.020073867371806e-05, "loss": 3.152, "step": 208850 }, { "epoch": 1.844787085607305, "grad_norm": 2.4161622524261475, "learning_rate": 7.018802632942687e-05, "loss": 3.0617, "step": 208900 }, { "epoch": 1.8452286334975891, "grad_norm": 3.8103973865509033, "learning_rate": 7.017531242578182e-05, "loss": 3.1433, "step": 208950 }, { "epoch": 1.8456701813878733, "grad_norm": 1.9754663705825806, "learning_rate": 7.016259696376496e-05, "loss": 3.0137, "step": 209000 }, { "epoch": 1.8461117292781575, "grad_norm": 3.691028594970703, "learning_rate": 7.014987994435846e-05, "loss": 3.0269, "step": 209050 }, { "epoch": 1.8465532771684416, "grad_norm": 4.685206413269043, "learning_rate": 7.013716136854462e-05, "loss": 3.7514, "step": 209100 }, { "epoch": 1.8469948250587258, "grad_norm": 4.822083473205566, "learning_rate": 7.012444123730579e-05, "loss": 3.11, "step": 209150 }, { "epoch": 1.84743637294901, "grad_norm": 2.390188694000244, "learning_rate": 7.011171955162455e-05, "loss": 3.4009, "step": 209200 }, { "epoch": 1.8478779208392941, "grad_norm": 2.3603127002716064, "learning_rate": 7.009899631248352e-05, "loss": 3.5394, "step": 209250 }, { "epoch": 1.8483194687295783, "grad_norm": 1.7594237327575684, "learning_rate": 7.008627152086545e-05, "loss": 3.0626, "step": 209300 }, { "epoch": 1.8487610166198625, "grad_norm": 4.551392555236816, "learning_rate": 7.007354517775323e-05, "loss": 3.2904, "step": 209350 }, { "epoch": 
1.8492025645101466, "grad_norm": 3.563063144683838, "learning_rate": 7.006081728412985e-05, "loss": 3.3053, "step": 209400 }, { "epoch": 1.8496441124004308, "grad_norm": 2.135427236557007, "learning_rate": 7.004808784097847e-05, "loss": 3.666, "step": 209450 }, { "epoch": 1.850085660290715, "grad_norm": 2.4800546169281006, "learning_rate": 7.00353568492823e-05, "loss": 3.5286, "step": 209500 }, { "epoch": 1.8505272081809994, "grad_norm": 4.204072952270508, "learning_rate": 7.002262431002471e-05, "loss": 2.979, "step": 209550 }, { "epoch": 1.8509687560712835, "grad_norm": 2.9853248596191406, "learning_rate": 7.000989022418916e-05, "loss": 3.5786, "step": 209600 }, { "epoch": 1.8514103039615677, "grad_norm": 0.9208089709281921, "learning_rate": 6.999715459275929e-05, "loss": 3.2202, "step": 209650 }, { "epoch": 1.8518518518518519, "grad_norm": 1.4217525720596313, "learning_rate": 6.99844174167188e-05, "loss": 3.2799, "step": 209700 }, { "epoch": 1.852293399742136, "grad_norm": 0.9643871784210205, "learning_rate": 6.997167869705152e-05, "loss": 3.3573, "step": 209750 }, { "epoch": 1.8527349476324202, "grad_norm": 1.374295949935913, "learning_rate": 6.995893843474141e-05, "loss": 2.9871, "step": 209800 }, { "epoch": 1.8531764955227044, "grad_norm": 3.098726511001587, "learning_rate": 6.994619663077255e-05, "loss": 3.1085, "step": 209850 }, { "epoch": 1.8536180434129885, "grad_norm": 7.691214084625244, "learning_rate": 6.993345328612917e-05, "loss": 2.9589, "step": 209900 }, { "epoch": 1.854059591303273, "grad_norm": 2.3036842346191406, "learning_rate": 6.992070840179552e-05, "loss": 3.3949, "step": 209950 }, { "epoch": 1.854501139193557, "grad_norm": 2.4765408039093018, "learning_rate": 6.990796197875609e-05, "loss": 3.071, "step": 210000 }, { "epoch": 1.854501139193557, "eval_asr_loss": 0.9128160601105207, "eval_loss": 2.8701679706573486, "eval_runtime": 20.6971, "eval_samples_per_second": 37.107, "eval_steps_per_second": 9.277, "eval_tts_loss": 5.965158913742212, 
"step": 210000 }, { "epoch": 1.8549426870838412, "grad_norm": 5.095572471618652, "learning_rate": 6.989521401799539e-05, "loss": 2.8314, "step": 210050 }, { "epoch": 1.8553842349741254, "grad_norm": 1.9772043228149414, "learning_rate": 6.988246452049814e-05, "loss": 2.9511, "step": 210100 }, { "epoch": 1.8558257828644096, "grad_norm": 3.964059352874756, "learning_rate": 6.986971348724909e-05, "loss": 3.0983, "step": 210150 }, { "epoch": 1.8562673307546937, "grad_norm": 2.66196608543396, "learning_rate": 6.985696091923314e-05, "loss": 3.0646, "step": 210200 }, { "epoch": 1.856708878644978, "grad_norm": 3.449439764022827, "learning_rate": 6.984420681743537e-05, "loss": 3.2398, "step": 210250 }, { "epoch": 1.857150426535262, "grad_norm": 10.892763137817383, "learning_rate": 6.98314511828409e-05, "loss": 3.1401, "step": 210300 }, { "epoch": 1.8575919744255462, "grad_norm": 1.941422939300537, "learning_rate": 6.981869401643498e-05, "loss": 3.3163, "step": 210350 }, { "epoch": 1.8580335223158304, "grad_norm": 3.753326892852783, "learning_rate": 6.980593531920302e-05, "loss": 2.955, "step": 210400 }, { "epoch": 1.8584750702061146, "grad_norm": 3.6323447227478027, "learning_rate": 6.979317509213049e-05, "loss": 2.9434, "step": 210450 }, { "epoch": 1.8589166180963987, "grad_norm": 1.4818679094314575, "learning_rate": 6.978041333620303e-05, "loss": 2.8079, "step": 210500 }, { "epoch": 1.859358165986683, "grad_norm": 2.958521842956543, "learning_rate": 6.976765005240636e-05, "loss": 3.0063, "step": 210550 }, { "epoch": 1.859799713876967, "grad_norm": 5.629188537597656, "learning_rate": 6.975488524172637e-05, "loss": 3.156, "step": 210600 }, { "epoch": 1.8602412617672512, "grad_norm": 5.116648197174072, "learning_rate": 6.9742118905149e-05, "loss": 2.5961, "step": 210650 }, { "epoch": 1.8606828096575354, "grad_norm": 3.616987466812134, "learning_rate": 6.972935104366036e-05, "loss": 3.3869, "step": 210700 }, { "epoch": 1.8611243575478196, "grad_norm": 1.0495541095733643, 
"learning_rate": 6.971658165824665e-05, "loss": 3.5673, "step": 210750 }, { "epoch": 1.8615659054381037, "grad_norm": 4.416067600250244, "learning_rate": 6.970381074989419e-05, "loss": 2.5567, "step": 210800 }, { "epoch": 1.862007453328388, "grad_norm": 4.130253314971924, "learning_rate": 6.969103831958944e-05, "loss": 3.2104, "step": 210850 }, { "epoch": 1.862449001218672, "grad_norm": 2.2265453338623047, "learning_rate": 6.967826436831896e-05, "loss": 3.3314, "step": 210900 }, { "epoch": 1.8628905491089562, "grad_norm": 3.0271718502044678, "learning_rate": 6.966548889706941e-05, "loss": 3.1078, "step": 210950 }, { "epoch": 1.8633320969992404, "grad_norm": 5.183620452880859, "learning_rate": 6.96527119068276e-05, "loss": 2.9481, "step": 211000 }, { "epoch": 1.8637736448895246, "grad_norm": 1.8804371356964111, "learning_rate": 6.963993339858047e-05, "loss": 2.826, "step": 211050 }, { "epoch": 1.8642151927798087, "grad_norm": 2.7824251651763916, "learning_rate": 6.962715337331501e-05, "loss": 3.0962, "step": 211100 }, { "epoch": 1.8646567406700931, "grad_norm": 2.67588472366333, "learning_rate": 6.961437183201838e-05, "loss": 2.7954, "step": 211150 }, { "epoch": 1.8650982885603773, "grad_norm": 2.0343751907348633, "learning_rate": 6.960158877567786e-05, "loss": 3.0353, "step": 211200 }, { "epoch": 1.8655398364506615, "grad_norm": 2.9644811153411865, "learning_rate": 6.958880420528083e-05, "loss": 2.8633, "step": 211250 }, { "epoch": 1.8659813843409456, "grad_norm": 2.8709774017333984, "learning_rate": 6.957601812181478e-05, "loss": 3.2773, "step": 211300 }, { "epoch": 1.8664229322312298, "grad_norm": 2.6100618839263916, "learning_rate": 6.95632305262673e-05, "loss": 2.8437, "step": 211350 }, { "epoch": 1.866864480121514, "grad_norm": 2.5091559886932373, "learning_rate": 6.955044141962619e-05, "loss": 3.3564, "step": 211400 }, { "epoch": 1.8673060280117981, "grad_norm": 2.1142711639404297, "learning_rate": 6.953765080287925e-05, "loss": 3.5474, "step": 211450 }, { 
"epoch": 1.8677475759020823, "grad_norm": 1.718662977218628, "learning_rate": 6.952485867701446e-05, "loss": 3.3915, "step": 211500 }, { "epoch": 1.8681891237923667, "grad_norm": 1.8452847003936768, "learning_rate": 6.951206504301991e-05, "loss": 2.9542, "step": 211550 }, { "epoch": 1.8686306716826508, "grad_norm": 0.9762416481971741, "learning_rate": 6.949926990188378e-05, "loss": 3.5469, "step": 211600 }, { "epoch": 1.869072219572935, "grad_norm": 2.9833409786224365, "learning_rate": 6.948647325459441e-05, "loss": 3.4791, "step": 211650 }, { "epoch": 1.8695137674632192, "grad_norm": 3.5274078845977783, "learning_rate": 6.947367510214022e-05, "loss": 2.7129, "step": 211700 }, { "epoch": 1.8699553153535033, "grad_norm": 1.9630882740020752, "learning_rate": 6.946087544550975e-05, "loss": 3.2424, "step": 211750 }, { "epoch": 1.8703968632437875, "grad_norm": 1.6840051412582397, "learning_rate": 6.944807428569168e-05, "loss": 3.1344, "step": 211800 }, { "epoch": 1.8708384111340717, "grad_norm": 2.4087986946105957, "learning_rate": 6.943527162367481e-05, "loss": 3.6155, "step": 211850 }, { "epoch": 1.8712799590243558, "grad_norm": 2.1375205516815186, "learning_rate": 6.942246746044798e-05, "loss": 3.2264, "step": 211900 }, { "epoch": 1.87172150691464, "grad_norm": 1.4749163389205933, "learning_rate": 6.940966179700026e-05, "loss": 3.4048, "step": 211950 }, { "epoch": 1.8721630548049242, "grad_norm": 2.870462656021118, "learning_rate": 6.939685463432073e-05, "loss": 3.3072, "step": 212000 }, { "epoch": 1.8726046026952083, "grad_norm": 2.298576593399048, "learning_rate": 6.93840459733987e-05, "loss": 3.2295, "step": 212050 }, { "epoch": 1.8730461505854925, "grad_norm": 2.6228911876678467, "learning_rate": 6.937123581522345e-05, "loss": 3.0334, "step": 212100 }, { "epoch": 1.8734876984757767, "grad_norm": 3.790313243865967, "learning_rate": 6.935842416078451e-05, "loss": 3.6681, "step": 212150 }, { "epoch": 1.8739292463660608, "grad_norm": 1.6933039426803589, 
"learning_rate": 6.934561101107147e-05, "loss": 3.3708, "step": 212200 }, { "epoch": 1.874370794256345, "grad_norm": 1.3448114395141602, "learning_rate": 6.9332796367074e-05, "loss": 3.5045, "step": 212250 }, { "epoch": 1.8748123421466292, "grad_norm": 1.4509389400482178, "learning_rate": 6.931998022978197e-05, "loss": 3.2581, "step": 212300 }, { "epoch": 1.8752538900369133, "grad_norm": 2.475632667541504, "learning_rate": 6.930716260018529e-05, "loss": 3.5627, "step": 212350 }, { "epoch": 1.8756954379271975, "grad_norm": 1.6319446563720703, "learning_rate": 6.929434347927401e-05, "loss": 3.4894, "step": 212400 }, { "epoch": 1.8761369858174817, "grad_norm": 3.2146284580230713, "learning_rate": 6.928152286803833e-05, "loss": 3.3476, "step": 212450 }, { "epoch": 1.8765785337077658, "grad_norm": 2.51674747467041, "learning_rate": 6.926870076746848e-05, "loss": 3.1805, "step": 212500 }, { "epoch": 1.87702008159805, "grad_norm": 3.0522384643554688, "learning_rate": 6.92558771785549e-05, "loss": 3.1066, "step": 212550 }, { "epoch": 1.8774616294883342, "grad_norm": 4.367920875549316, "learning_rate": 6.924305210228812e-05, "loss": 3.4727, "step": 212600 }, { "epoch": 1.8779031773786183, "grad_norm": 1.442237138748169, "learning_rate": 6.923022553965873e-05, "loss": 2.9928, "step": 212650 }, { "epoch": 1.8783447252689025, "grad_norm": 5.329409599304199, "learning_rate": 6.921739749165747e-05, "loss": 2.8959, "step": 212700 }, { "epoch": 1.878786273159187, "grad_norm": 1.250346302986145, "learning_rate": 6.92045679592752e-05, "loss": 3.0655, "step": 212750 }, { "epoch": 1.879227821049471, "grad_norm": 3.5236244201660156, "learning_rate": 6.919173694350293e-05, "loss": 3.4609, "step": 212800 }, { "epoch": 1.8796693689397552, "grad_norm": 1.863847017288208, "learning_rate": 6.917890444533173e-05, "loss": 3.4643, "step": 212850 }, { "epoch": 1.8801109168300394, "grad_norm": 1.5934778451919556, "learning_rate": 6.916607046575276e-05, "loss": 2.9508, "step": 212900 }, { "epoch": 
1.8805524647203236, "grad_norm": 1.6757519245147705, "learning_rate": 6.915323500575739e-05, "loss": 2.9168, "step": 212950 }, { "epoch": 1.8809940126106077, "grad_norm": 3.9492690563201904, "learning_rate": 6.914065481961739e-05, "loss": 3.1243, "step": 213000 }, { "epoch": 1.8809940126106077, "eval_asr_loss": 0.9141171246415224, "eval_loss": 2.869227647781372, "eval_runtime": 20.5884, "eval_samples_per_second": 37.303, "eval_steps_per_second": 9.326, "eval_tts_loss": 5.979077017942771, "step": 213000 }, { "epoch": 1.881435560500892, "grad_norm": 1.5101724863052368, "learning_rate": 6.912781643132254e-05, "loss": 3.2316, "step": 213050 }, { "epoch": 1.881877108391176, "grad_norm": 1.5922937393188477, "learning_rate": 6.911497656556606e-05, "loss": 3.423, "step": 213100 }, { "epoch": 1.8823186562814604, "grad_norm": 2.6681389808654785, "learning_rate": 6.910213522333973e-05, "loss": 3.3049, "step": 213150 }, { "epoch": 1.8827602041717446, "grad_norm": 3.4186484813690186, "learning_rate": 6.908929240563545e-05, "loss": 2.7473, "step": 213200 }, { "epoch": 1.8832017520620288, "grad_norm": 2.218273639678955, "learning_rate": 6.90764481134452e-05, "loss": 3.5811, "step": 213250 }, { "epoch": 1.883643299952313, "grad_norm": 3.6075308322906494, "learning_rate": 6.906360234776109e-05, "loss": 3.3159, "step": 213300 }, { "epoch": 1.884084847842597, "grad_norm": 3.542436122894287, "learning_rate": 6.905075510957536e-05, "loss": 3.2598, "step": 213350 }, { "epoch": 1.8845263957328813, "grad_norm": 3.3695287704467773, "learning_rate": 6.903790639988032e-05, "loss": 3.4699, "step": 213400 }, { "epoch": 1.8849679436231654, "grad_norm": 4.290796279907227, "learning_rate": 6.902505621966848e-05, "loss": 3.405, "step": 213450 }, { "epoch": 1.8854094915134496, "grad_norm": 2.021557331085205, "learning_rate": 6.901220456993236e-05, "loss": 3.3985, "step": 213500 }, { "epoch": 1.8858510394037338, "grad_norm": 2.5229990482330322, "learning_rate": 6.899935145166467e-05, "loss": 3.265, 
"step": 213550 }, { "epoch": 1.886292587294018, "grad_norm": 1.655785322189331, "learning_rate": 6.898649686585817e-05, "loss": 2.9395, "step": 213600 }, { "epoch": 1.886734135184302, "grad_norm": 1.8800694942474365, "learning_rate": 6.89736408135058e-05, "loss": 3.3818, "step": 213650 }, { "epoch": 1.8871756830745863, "grad_norm": 1.9261225461959839, "learning_rate": 6.896078329560056e-05, "loss": 3.5134, "step": 213700 }, { "epoch": 1.8876172309648704, "grad_norm": 2.3512516021728516, "learning_rate": 6.894792431313561e-05, "loss": 2.9406, "step": 213750 }, { "epoch": 1.8880587788551546, "grad_norm": 1.5226099491119385, "learning_rate": 6.893506386710415e-05, "loss": 2.9769, "step": 213800 }, { "epoch": 1.8885003267454388, "grad_norm": 2.3892409801483154, "learning_rate": 6.89222019584996e-05, "loss": 3.4092, "step": 213850 }, { "epoch": 1.888941874635723, "grad_norm": 1.6916769742965698, "learning_rate": 6.890933858831539e-05, "loss": 3.077, "step": 213900 }, { "epoch": 1.889383422526007, "grad_norm": 0.7223499417304993, "learning_rate": 6.889647375754512e-05, "loss": 3.2138, "step": 213950 }, { "epoch": 1.8898249704162913, "grad_norm": 2.6566531658172607, "learning_rate": 6.88836074671825e-05, "loss": 3.0725, "step": 214000 }, { "epoch": 1.8902665183065754, "grad_norm": 2.1201887130737305, "learning_rate": 6.887073971822131e-05, "loss": 3.2355, "step": 214050 }, { "epoch": 1.8907080661968596, "grad_norm": 2.0156757831573486, "learning_rate": 6.885787051165553e-05, "loss": 3.1942, "step": 214100 }, { "epoch": 1.8911496140871438, "grad_norm": 3.2175350189208984, "learning_rate": 6.884499984847914e-05, "loss": 3.2014, "step": 214150 }, { "epoch": 1.891591161977428, "grad_norm": 3.1125988960266113, "learning_rate": 6.88321277296863e-05, "loss": 2.7858, "step": 214200 }, { "epoch": 1.892032709867712, "grad_norm": 2.6365787982940674, "learning_rate": 6.881925415627131e-05, "loss": 2.9779, "step": 214250 }, { "epoch": 1.8924742577579963, "grad_norm": 
4.814088821411133, "learning_rate": 6.880637912922852e-05, "loss": 2.564, "step": 214300 }, { "epoch": 1.8929158056482807, "grad_norm": 2.180482864379883, "learning_rate": 6.87935026495524e-05, "loss": 3.4417, "step": 214350 }, { "epoch": 1.8933573535385648, "grad_norm": 3.2703518867492676, "learning_rate": 6.878062471823756e-05, "loss": 3.5152, "step": 214400 }, { "epoch": 1.893798901428849, "grad_norm": 2.8934197425842285, "learning_rate": 6.876774533627873e-05, "loss": 3.3839, "step": 214450 }, { "epoch": 1.8942404493191332, "grad_norm": 2.586165189743042, "learning_rate": 6.875486450467073e-05, "loss": 3.1851, "step": 214500 }, { "epoch": 1.8946819972094173, "grad_norm": 4.4620232582092285, "learning_rate": 6.874198222440845e-05, "loss": 3.3091, "step": 214550 }, { "epoch": 1.8951235450997015, "grad_norm": 2.581688642501831, "learning_rate": 6.8729098496487e-05, "loss": 2.9032, "step": 214600 }, { "epoch": 1.8955650929899857, "grad_norm": 4.92921781539917, "learning_rate": 6.87162133219015e-05, "loss": 2.9231, "step": 214650 }, { "epoch": 1.8960066408802698, "grad_norm": 2.6516809463500977, "learning_rate": 6.870332670164722e-05, "loss": 3.3176, "step": 214700 }, { "epoch": 1.8964481887705542, "grad_norm": 3.65873122215271, "learning_rate": 6.869043863671955e-05, "loss": 3.1889, "step": 214750 }, { "epoch": 1.8968897366608384, "grad_norm": 5.168747901916504, "learning_rate": 6.8677549128114e-05, "loss": 3.8415, "step": 214800 }, { "epoch": 1.8973312845511225, "grad_norm": 20.138532638549805, "learning_rate": 6.866465817682616e-05, "loss": 3.2379, "step": 214850 }, { "epoch": 1.8977728324414067, "grad_norm": 1.795832633972168, "learning_rate": 6.865176578385175e-05, "loss": 2.9264, "step": 214900 }, { "epoch": 1.8982143803316909, "grad_norm": 4.119626998901367, "learning_rate": 6.863887195018658e-05, "loss": 3.3952, "step": 214950 }, { "epoch": 1.898655928221975, "grad_norm": 0.8645926117897034, "learning_rate": 6.862597667682661e-05, "loss": 3.247, "step": 
215000 }, { "epoch": 1.8990974761122592, "grad_norm": 1.5755289793014526, "learning_rate": 6.86130799647679e-05, "loss": 3.2569, "step": 215050 }, { "epoch": 1.8995390240025434, "grad_norm": 2.1388795375823975, "learning_rate": 6.860018181500659e-05, "loss": 3.1373, "step": 215100 }, { "epoch": 1.8999805718928275, "grad_norm": 2.395258665084839, "learning_rate": 6.858728222853895e-05, "loss": 3.3231, "step": 215150 }, { "epoch": 1.9004221197831117, "grad_norm": 2.154432773590088, "learning_rate": 6.857438120636137e-05, "loss": 3.3407, "step": 215200 }, { "epoch": 1.9008636676733959, "grad_norm": 4.255756378173828, "learning_rate": 6.856147874947036e-05, "loss": 3.256, "step": 215250 }, { "epoch": 1.90130521556368, "grad_norm": 4.8996357917785645, "learning_rate": 6.854857485886252e-05, "loss": 3.2052, "step": 215300 }, { "epoch": 1.9017467634539642, "grad_norm": 3.192509412765503, "learning_rate": 6.853566953553454e-05, "loss": 3.4194, "step": 215350 }, { "epoch": 1.9021883113442484, "grad_norm": 4.064058303833008, "learning_rate": 6.852276278048329e-05, "loss": 3.2169, "step": 215400 }, { "epoch": 1.9026298592345325, "grad_norm": 2.9588820934295654, "learning_rate": 6.850985459470567e-05, "loss": 3.0224, "step": 215450 }, { "epoch": 1.9030714071248167, "grad_norm": 4.584090709686279, "learning_rate": 6.849694497919874e-05, "loss": 3.308, "step": 215500 }, { "epoch": 1.9035129550151009, "grad_norm": 2.459688186645508, "learning_rate": 6.848403393495966e-05, "loss": 2.8452, "step": 215550 }, { "epoch": 1.903954502905385, "grad_norm": 3.800164222717285, "learning_rate": 6.847112146298568e-05, "loss": 2.9931, "step": 215600 }, { "epoch": 1.9043960507956692, "grad_norm": 3.536306858062744, "learning_rate": 6.845820756427424e-05, "loss": 3.2442, "step": 215650 }, { "epoch": 1.9048375986859534, "grad_norm": 3.559290647506714, "learning_rate": 6.844529223982275e-05, "loss": 2.9447, "step": 215700 }, { "epoch": 1.9052791465762375, "grad_norm": 4.145590305328369, 
"learning_rate": 6.843237549062886e-05, "loss": 3.1105, "step": 215750 }, { "epoch": 1.9057206944665217, "grad_norm": 0.8923916220664978, "learning_rate": 6.841945731769028e-05, "loss": 2.9162, "step": 215800 }, { "epoch": 1.9061622423568059, "grad_norm": 3.795706272125244, "learning_rate": 6.840653772200479e-05, "loss": 3.0548, "step": 215850 }, { "epoch": 1.90660379024709, "grad_norm": 2.7242794036865234, "learning_rate": 6.839361670457037e-05, "loss": 2.8588, "step": 215900 }, { "epoch": 1.9070453381373744, "grad_norm": 2.8802475929260254, "learning_rate": 6.8380694266385e-05, "loss": 2.8865, "step": 215950 }, { "epoch": 1.9074868860276586, "grad_norm": 2.9323482513427734, "learning_rate": 6.836777040844688e-05, "loss": 3.0616, "step": 216000 }, { "epoch": 1.9074868860276586, "eval_asr_loss": 0.9128535109015836, "eval_loss": 2.869661331176758, "eval_runtime": 20.623, "eval_samples_per_second": 37.24, "eval_steps_per_second": 9.31, "eval_tts_loss": 5.943447776517638, "step": 216000 }, { "epoch": 1.9079284339179428, "grad_norm": 1.7712619304656982, "learning_rate": 6.835484513175427e-05, "loss": 3.6614, "step": 216050 }, { "epoch": 1.908369981808227, "grad_norm": 4.220677852630615, "learning_rate": 6.834191843730549e-05, "loss": 3.0352, "step": 216100 }, { "epoch": 1.908811529698511, "grad_norm": 3.0093469619750977, "learning_rate": 6.832899032609904e-05, "loss": 3.3183, "step": 216150 }, { "epoch": 1.9092530775887953, "grad_norm": 1.6805152893066406, "learning_rate": 6.831606079913352e-05, "loss": 3.3335, "step": 216200 }, { "epoch": 1.9096946254790794, "grad_norm": 5.431236267089844, "learning_rate": 6.830312985740764e-05, "loss": 3.4567, "step": 216250 }, { "epoch": 1.9101361733693636, "grad_norm": 1.6891889572143555, "learning_rate": 6.829019750192016e-05, "loss": 2.7522, "step": 216300 }, { "epoch": 1.910577721259648, "grad_norm": 3.535320997238159, "learning_rate": 6.827726373367003e-05, "loss": 3.6283, "step": 216350 }, { "epoch": 1.9110192691499321, 
"grad_norm": 5.188021183013916, "learning_rate": 6.826432855365626e-05, "loss": 3.189, "step": 216400 }, { "epoch": 1.9114608170402163, "grad_norm": 3.6246254444122314, "learning_rate": 6.825165070851258e-05, "loss": 3.0975, "step": 216450 }, { "epoch": 1.9119023649305005, "grad_norm": 4.558775901794434, "learning_rate": 6.823871273615455e-05, "loss": 3.5297, "step": 216500 }, { "epoch": 1.9123439128207846, "grad_norm": 3.1083285808563232, "learning_rate": 6.822577335501065e-05, "loss": 3.217, "step": 216550 }, { "epoch": 1.9127854607110688, "grad_norm": 2.8470730781555176, "learning_rate": 6.821283256608029e-05, "loss": 3.5633, "step": 216600 }, { "epoch": 1.913227008601353, "grad_norm": 2.6485366821289062, "learning_rate": 6.819989037036303e-05, "loss": 3.0846, "step": 216650 }, { "epoch": 1.9136685564916371, "grad_norm": 4.975265026092529, "learning_rate": 6.81869467688586e-05, "loss": 2.9518, "step": 216700 }, { "epoch": 1.9141101043819213, "grad_norm": 3.9095828533172607, "learning_rate": 6.817400176256674e-05, "loss": 3.0829, "step": 216750 }, { "epoch": 1.9145516522722055, "grad_norm": 2.3637115955352783, "learning_rate": 6.816105535248735e-05, "loss": 3.1916, "step": 216800 }, { "epoch": 1.9149932001624896, "grad_norm": 1.6925334930419922, "learning_rate": 6.814810753962045e-05, "loss": 3.2711, "step": 216850 }, { "epoch": 1.9154347480527738, "grad_norm": 1.323299527168274, "learning_rate": 6.813515832496613e-05, "loss": 3.3585, "step": 216900 }, { "epoch": 1.915876295943058, "grad_norm": 4.358603000640869, "learning_rate": 6.812220770952462e-05, "loss": 3.396, "step": 216950 }, { "epoch": 1.9163178438333421, "grad_norm": 2.25028920173645, "learning_rate": 6.810925569429622e-05, "loss": 3.6563, "step": 217000 }, { "epoch": 1.9167593917236263, "grad_norm": 1.2531156539916992, "learning_rate": 6.809630228028138e-05, "loss": 3.1924, "step": 217050 }, { "epoch": 1.9172009396139105, "grad_norm": 2.4231159687042236, "learning_rate": 6.808334746848064e-05, "loss": 
3.3104, "step": 217100 }, { "epoch": 1.9176424875041946, "grad_norm": 4.368786334991455, "learning_rate": 6.807039125989466e-05, "loss": 3.348, "step": 217150 }, { "epoch": 1.9180840353944788, "grad_norm": 2.5258097648620605, "learning_rate": 6.80574336555242e-05, "loss": 2.6929, "step": 217200 }, { "epoch": 1.918525583284763, "grad_norm": 3.867431163787842, "learning_rate": 6.804447465637008e-05, "loss": 3.1379, "step": 217250 }, { "epoch": 1.9189671311750471, "grad_norm": 1.6516950130462646, "learning_rate": 6.803151426343331e-05, "loss": 3.2439, "step": 217300 }, { "epoch": 1.9194086790653313, "grad_norm": 1.0718307495117188, "learning_rate": 6.8018552477715e-05, "loss": 3.08, "step": 217350 }, { "epoch": 1.9198502269556155, "grad_norm": 1.568926453590393, "learning_rate": 6.800558930021627e-05, "loss": 3.207, "step": 217400 }, { "epoch": 1.9202917748458996, "grad_norm": 2.034952402114868, "learning_rate": 6.799262473193846e-05, "loss": 2.7577, "step": 217450 }, { "epoch": 1.9207333227361838, "grad_norm": 1.5532232522964478, "learning_rate": 6.797965877388297e-05, "loss": 3.395, "step": 217500 }, { "epoch": 1.9211748706264682, "grad_norm": 2.738466739654541, "learning_rate": 6.79666914270513e-05, "loss": 3.2612, "step": 217550 }, { "epoch": 1.9216164185167524, "grad_norm": 1.9071691036224365, "learning_rate": 6.795372269244506e-05, "loss": 3.145, "step": 217600 }, { "epoch": 1.9220579664070365, "grad_norm": 4.6840596199035645, "learning_rate": 6.794075257106599e-05, "loss": 3.3608, "step": 217650 }, { "epoch": 1.9224995142973207, "grad_norm": 1.42924165725708, "learning_rate": 6.792778106391591e-05, "loss": 2.9166, "step": 217700 }, { "epoch": 1.9229410621876049, "grad_norm": 3.003345012664795, "learning_rate": 6.79148081719968e-05, "loss": 3.1679, "step": 217750 }, { "epoch": 1.923382610077889, "grad_norm": 2.117689609527588, "learning_rate": 6.790183389631065e-05, "loss": 3.0708, "step": 217800 }, { "epoch": 1.9238241579681732, "grad_norm": 1.345949411392212, 
"learning_rate": 6.788885823785962e-05, "loss": 3.379, "step": 217850 }, { "epoch": 1.9242657058584574, "grad_norm": 2.085387945175171, "learning_rate": 6.787588119764599e-05, "loss": 3.3116, "step": 217900 }, { "epoch": 1.9247072537487417, "grad_norm": 1.578888177871704, "learning_rate": 6.786290277667214e-05, "loss": 3.804, "step": 217950 }, { "epoch": 1.925148801639026, "grad_norm": 2.3243682384490967, "learning_rate": 6.78499229759405e-05, "loss": 3.2998, "step": 218000 }, { "epoch": 1.92559034952931, "grad_norm": 1.385135293006897, "learning_rate": 6.783694179645368e-05, "loss": 3.2463, "step": 218050 }, { "epoch": 1.9260318974195942, "grad_norm": 4.232430934906006, "learning_rate": 6.782395923921436e-05, "loss": 3.0034, "step": 218100 }, { "epoch": 1.9264734453098784, "grad_norm": 0.8287903070449829, "learning_rate": 6.781097530522534e-05, "loss": 3.0441, "step": 218150 }, { "epoch": 1.9269149932001626, "grad_norm": 1.6629900932312012, "learning_rate": 6.779798999548949e-05, "loss": 3.075, "step": 218200 }, { "epoch": 1.9273565410904467, "grad_norm": 6.08732795715332, "learning_rate": 6.778500331100985e-05, "loss": 3.3407, "step": 218250 }, { "epoch": 1.927798088980731, "grad_norm": 2.5804171562194824, "learning_rate": 6.777201525278954e-05, "loss": 3.2754, "step": 218300 }, { "epoch": 1.928239636871015, "grad_norm": 2.3154220581054688, "learning_rate": 6.77590258218317e-05, "loss": 3.1724, "step": 218350 }, { "epoch": 1.9286811847612992, "grad_norm": 2.769705057144165, "learning_rate": 6.774603501913975e-05, "loss": 3.3089, "step": 218400 }, { "epoch": 1.9291227326515834, "grad_norm": 1.773826241493225, "learning_rate": 6.773304284571707e-05, "loss": 3.05, "step": 218450 }, { "epoch": 1.9295642805418676, "grad_norm": 1.2457982301712036, "learning_rate": 6.77200493025672e-05, "loss": 2.9031, "step": 218500 }, { "epoch": 1.9300058284321517, "grad_norm": 4.650148391723633, "learning_rate": 6.770705439069377e-05, "loss": 3.1762, "step": 218550 }, { "epoch": 
1.930447376322436, "grad_norm": 4.885578632354736, "learning_rate": 6.769405811110055e-05, "loss": 2.9341, "step": 218600 }, { "epoch": 1.93088892421272, "grad_norm": 5.776822090148926, "learning_rate": 6.768106046479138e-05, "loss": 3.1056, "step": 218650 }, { "epoch": 1.9313304721030042, "grad_norm": 3.6868488788604736, "learning_rate": 6.766806145277023e-05, "loss": 3.2905, "step": 218700 }, { "epoch": 1.9317720199932884, "grad_norm": 2.8849265575408936, "learning_rate": 6.765506107604113e-05, "loss": 3.3836, "step": 218750 }, { "epoch": 1.9322135678835726, "grad_norm": 2.124006986618042, "learning_rate": 6.764205933560829e-05, "loss": 3.4177, "step": 218800 }, { "epoch": 1.9326551157738567, "grad_norm": 7.862915992736816, "learning_rate": 6.762905623247596e-05, "loss": 2.9393, "step": 218850 }, { "epoch": 1.933096663664141, "grad_norm": 3.948845624923706, "learning_rate": 6.761605176764854e-05, "loss": 3.3315, "step": 218900 }, { "epoch": 1.933538211554425, "grad_norm": 0.9531094431877136, "learning_rate": 6.760304594213048e-05, "loss": 3.3055, "step": 218950 }, { "epoch": 1.9339797594447092, "grad_norm": 1.7013096809387207, "learning_rate": 6.75900387569264e-05, "loss": 3.2939, "step": 219000 }, { "epoch": 1.9339797594447092, "eval_asr_loss": 0.922124789094716, "eval_loss": 2.875542402267456, "eval_runtime": 20.7112, "eval_samples_per_second": 37.081, "eval_steps_per_second": 9.27, "eval_tts_loss": 5.956120436386516, "step": 219000 }, { "epoch": 1.9344213073349934, "grad_norm": 5.030139446258545, "learning_rate": 6.757703021304099e-05, "loss": 3.032, "step": 219050 }, { "epoch": 1.9348628552252778, "grad_norm": 3.529791831970215, "learning_rate": 6.756402031147905e-05, "loss": 2.8338, "step": 219100 }, { "epoch": 1.935304403115562, "grad_norm": 2.3809285163879395, "learning_rate": 6.755100905324548e-05, "loss": 3.3668, "step": 219150 }, { "epoch": 1.9357459510058461, "grad_norm": 4.645364284515381, "learning_rate": 6.753799643934526e-05, "loss": 3.2265, 
"step": 219200 }, { "epoch": 1.9361874988961303, "grad_norm": 3.215935468673706, "learning_rate": 6.752498247078355e-05, "loss": 3.178, "step": 219250 }, { "epoch": 1.9366290467864145, "grad_norm": 4.604817867279053, "learning_rate": 6.751196714856556e-05, "loss": 3.4193, "step": 219300 }, { "epoch": 1.9370705946766986, "grad_norm": 5.685570240020752, "learning_rate": 6.74989504736966e-05, "loss": 3.0059, "step": 219350 }, { "epoch": 1.9375121425669828, "grad_norm": 2.7423245906829834, "learning_rate": 6.74859324471821e-05, "loss": 2.7509, "step": 219400 }, { "epoch": 1.937953690457267, "grad_norm": 2.651466131210327, "learning_rate": 6.747291307002759e-05, "loss": 3.3804, "step": 219450 }, { "epoch": 1.9383952383475513, "grad_norm": 0.9820277690887451, "learning_rate": 6.745989234323872e-05, "loss": 3.3264, "step": 219500 }, { "epoch": 1.9388367862378355, "grad_norm": 1.3179621696472168, "learning_rate": 6.744687026782123e-05, "loss": 3.0364, "step": 219550 }, { "epoch": 1.9392783341281197, "grad_norm": 3.5786843299865723, "learning_rate": 6.743384684478093e-05, "loss": 2.9091, "step": 219600 }, { "epoch": 1.9397198820184038, "grad_norm": 4.74429178237915, "learning_rate": 6.742082207512381e-05, "loss": 3.3593, "step": 219650 }, { "epoch": 1.940161429908688, "grad_norm": 2.6699705123901367, "learning_rate": 6.740779595985591e-05, "loss": 3.6889, "step": 219700 }, { "epoch": 1.9406029777989722, "grad_norm": 2.328733205795288, "learning_rate": 6.73947684999834e-05, "loss": 3.5794, "step": 219750 }, { "epoch": 1.9410445256892563, "grad_norm": 2.218416213989258, "learning_rate": 6.738173969651251e-05, "loss": 3.1906, "step": 219800 }, { "epoch": 1.9414860735795405, "grad_norm": 1.3207190036773682, "learning_rate": 6.736870955044963e-05, "loss": 3.1299, "step": 219850 }, { "epoch": 1.9419276214698247, "grad_norm": 2.481778144836426, "learning_rate": 6.735567806280121e-05, "loss": 2.9451, "step": 219900 }, { "epoch": 1.9423691693601088, "grad_norm": 2.82830548286438, 
"learning_rate": 6.734264523457383e-05, "loss": 3.3885, "step": 219950 }, { "epoch": 1.942810717250393, "grad_norm": 1.681307315826416, "learning_rate": 6.732987176325148e-05, "loss": 3.4272, "step": 220000 }, { "epoch": 1.9432522651406772, "grad_norm": 4.030539035797119, "learning_rate": 6.731683628364775e-05, "loss": 3.3767, "step": 220050 }, { "epoch": 1.9436938130309613, "grad_norm": 2.7558698654174805, "learning_rate": 6.730379946646527e-05, "loss": 3.6885, "step": 220100 }, { "epoch": 1.9441353609212455, "grad_norm": 2.890310525894165, "learning_rate": 6.729076131271102e-05, "loss": 3.3828, "step": 220150 }, { "epoch": 1.9445769088115297, "grad_norm": 1.815947413444519, "learning_rate": 6.727772182339209e-05, "loss": 3.3614, "step": 220200 }, { "epoch": 1.9450184567018138, "grad_norm": 1.3657796382904053, "learning_rate": 6.726468099951563e-05, "loss": 3.1284, "step": 220250 }, { "epoch": 1.945460004592098, "grad_norm": 2.1993958950042725, "learning_rate": 6.725163884208898e-05, "loss": 3.0117, "step": 220300 }, { "epoch": 1.9459015524823822, "grad_norm": 3.3096845149993896, "learning_rate": 6.723859535211951e-05, "loss": 3.5753, "step": 220350 }, { "epoch": 1.9463431003726663, "grad_norm": 2.9292962551116943, "learning_rate": 6.722555053061474e-05, "loss": 3.0053, "step": 220400 }, { "epoch": 1.9467846482629505, "grad_norm": 1.084033727645874, "learning_rate": 6.721250437858225e-05, "loss": 2.8641, "step": 220450 }, { "epoch": 1.9472261961532347, "grad_norm": 1.6132060289382935, "learning_rate": 6.719945689702975e-05, "loss": 3.2127, "step": 220500 }, { "epoch": 1.9476677440435188, "grad_norm": 3.2746384143829346, "learning_rate": 6.718640808696508e-05, "loss": 2.9314, "step": 220550 }, { "epoch": 1.948109291933803, "grad_norm": 2.0202183723449707, "learning_rate": 6.71733579493961e-05, "loss": 3.4692, "step": 220600 }, { "epoch": 1.9485508398240872, "grad_norm": 1.8772021532058716, "learning_rate": 6.716030648533085e-05, "loss": 2.9944, "step": 220650 }, { 
"epoch": 1.9489923877143716, "grad_norm": 2.3999502658843994, "learning_rate": 6.714725369577744e-05, "loss": 2.832, "step": 220700 }, { "epoch": 1.9494339356046557, "grad_norm": 4.706568241119385, "learning_rate": 6.713419958174407e-05, "loss": 3.0989, "step": 220750 }, { "epoch": 1.9498754834949399, "grad_norm": 4.1655097007751465, "learning_rate": 6.712114414423909e-05, "loss": 2.9818, "step": 220800 }, { "epoch": 1.950317031385224, "grad_norm": 3.7588179111480713, "learning_rate": 6.710808738427092e-05, "loss": 3.2759, "step": 220850 }, { "epoch": 1.9507585792755082, "grad_norm": 1.6095143556594849, "learning_rate": 6.709502930284805e-05, "loss": 3.4051, "step": 220900 }, { "epoch": 1.9512001271657924, "grad_norm": 2.343217372894287, "learning_rate": 6.708196990097914e-05, "loss": 3.5032, "step": 220950 }, { "epoch": 1.9516416750560766, "grad_norm": 3.6511197090148926, "learning_rate": 6.70689091796729e-05, "loss": 3.2668, "step": 221000 }, { "epoch": 1.9520832229463607, "grad_norm": 4.484455585479736, "learning_rate": 6.705584713993817e-05, "loss": 3.2429, "step": 221050 }, { "epoch": 1.952524770836645, "grad_norm": 1.386128306388855, "learning_rate": 6.704278378278387e-05, "loss": 3.1461, "step": 221100 }, { "epoch": 1.9529663187269293, "grad_norm": 1.4120815992355347, "learning_rate": 6.702971910921904e-05, "loss": 3.2366, "step": 221150 }, { "epoch": 1.9534078666172134, "grad_norm": 1.724896788597107, "learning_rate": 6.701665312025284e-05, "loss": 3.384, "step": 221200 }, { "epoch": 1.9538494145074976, "grad_norm": 0.8912140727043152, "learning_rate": 6.700358581689446e-05, "loss": 3.071, "step": 221250 }, { "epoch": 1.9542909623977818, "grad_norm": 5.190890312194824, "learning_rate": 6.699051720015326e-05, "loss": 3.1861, "step": 221300 }, { "epoch": 1.954732510288066, "grad_norm": 3.6501457691192627, "learning_rate": 6.697744727103871e-05, "loss": 2.9187, "step": 221350 }, { "epoch": 1.95517405817835, "grad_norm": 2.820183277130127, "learning_rate": 
6.696437603056029e-05, "loss": 3.0877, "step": 221400 }, { "epoch": 1.9556156060686343, "grad_norm": 5.598944664001465, "learning_rate": 6.695130347972769e-05, "loss": 3.1602, "step": 221450 }, { "epoch": 1.9560571539589184, "grad_norm": 2.189221143722534, "learning_rate": 6.693822961955063e-05, "loss": 2.9852, "step": 221500 }, { "epoch": 1.9564987018492026, "grad_norm": 2.1708157062530518, "learning_rate": 6.6925154451039e-05, "loss": 3.4098, "step": 221550 }, { "epoch": 1.9569402497394868, "grad_norm": 1.5089625120162964, "learning_rate": 6.691207797520267e-05, "loss": 3.1213, "step": 221600 }, { "epoch": 1.957381797629771, "grad_norm": 2.335033416748047, "learning_rate": 6.689900019305174e-05, "loss": 3.354, "step": 221650 }, { "epoch": 1.957823345520055, "grad_norm": 3.1629550457000732, "learning_rate": 6.688592110559635e-05, "loss": 3.287, "step": 221700 }, { "epoch": 1.9582648934103393, "grad_norm": 6.7626142501831055, "learning_rate": 6.687284071384674e-05, "loss": 3.0697, "step": 221750 }, { "epoch": 1.9587064413006234, "grad_norm": 2.335103988647461, "learning_rate": 6.685975901881325e-05, "loss": 3.7996, "step": 221800 }, { "epoch": 1.9591479891909076, "grad_norm": 4.960870265960693, "learning_rate": 6.684667602150636e-05, "loss": 3.2685, "step": 221850 }, { "epoch": 1.9595895370811918, "grad_norm": 2.5751452445983887, "learning_rate": 6.683359172293659e-05, "loss": 3.0824, "step": 221900 }, { "epoch": 1.960031084971476, "grad_norm": 1.5638737678527832, "learning_rate": 6.68205061241146e-05, "loss": 3.1128, "step": 221950 }, { "epoch": 1.96047263286176, "grad_norm": 2.9490702152252197, "learning_rate": 6.680741922605116e-05, "loss": 3.1104, "step": 222000 }, { "epoch": 1.96047263286176, "eval_asr_loss": 0.9345355808110474, "eval_loss": 2.8736743927001953, "eval_runtime": 20.8897, "eval_samples_per_second": 36.765, "eval_steps_per_second": 9.191, "eval_tts_loss": 5.937240915996316, "step": 222000 }, { "epoch": 1.9609141807520443, "grad_norm": 
1.6358108520507812, "learning_rate": 6.67943310297571e-05, "loss": 3.041, "step": 222050 }, { "epoch": 1.9613557286423284, "grad_norm": 3.1550421714782715, "learning_rate": 6.678124153624338e-05, "loss": 2.9968, "step": 222100 }, { "epoch": 1.9617972765326126, "grad_norm": 4.3898091316223145, "learning_rate": 6.676815074652106e-05, "loss": 2.8248, "step": 222150 }, { "epoch": 1.9622388244228968, "grad_norm": 2.0046401023864746, "learning_rate": 6.675505866160127e-05, "loss": 3.1247, "step": 222200 }, { "epoch": 1.962680372313181, "grad_norm": 4.28106689453125, "learning_rate": 6.674196528249528e-05, "loss": 3.3193, "step": 222250 }, { "epoch": 1.9631219202034653, "grad_norm": 4.919691562652588, "learning_rate": 6.672887061021444e-05, "loss": 3.5789, "step": 222300 }, { "epoch": 1.9635634680937495, "grad_norm": 3.231856346130371, "learning_rate": 6.671577464577022e-05, "loss": 2.8704, "step": 222350 }, { "epoch": 1.9640050159840337, "grad_norm": 3.4530904293060303, "learning_rate": 6.670267739017413e-05, "loss": 2.9751, "step": 222400 }, { "epoch": 1.9644465638743178, "grad_norm": 1.820241093635559, "learning_rate": 6.668957884443786e-05, "loss": 3.6656, "step": 222450 }, { "epoch": 1.964888111764602, "grad_norm": 4.895803928375244, "learning_rate": 6.667674101889736e-05, "loss": 2.7195, "step": 222500 }, { "epoch": 1.9653296596548862, "grad_norm": 1.803472876548767, "learning_rate": 6.666363992166847e-05, "loss": 3.4702, "step": 222550 }, { "epoch": 1.9657712075451703, "grad_norm": 4.227266788482666, "learning_rate": 6.665053753731469e-05, "loss": 3.2673, "step": 222600 }, { "epoch": 1.9662127554354545, "grad_norm": 2.4153201580047607, "learning_rate": 6.66374338668481e-05, "loss": 3.3712, "step": 222650 }, { "epoch": 1.9666543033257389, "grad_norm": 4.17147159576416, "learning_rate": 6.66243289112808e-05, "loss": 3.1089, "step": 222700 }, { "epoch": 1.967095851216023, "grad_norm": 1.907220721244812, "learning_rate": 6.661122267162508e-05, "loss": 3.0767, "step": 
222750 }, { "epoch": 1.9675373991063072, "grad_norm": 3.343963861465454, "learning_rate": 6.659811514889324e-05, "loss": 3.0645, "step": 222800 }, { "epoch": 1.9679789469965914, "grad_norm": 2.7508366107940674, "learning_rate": 6.658500634409779e-05, "loss": 3.276, "step": 222850 }, { "epoch": 1.9684204948868755, "grad_norm": 6.333864212036133, "learning_rate": 6.657189625825122e-05, "loss": 2.9536, "step": 222900 }, { "epoch": 1.9688620427771597, "grad_norm": 4.854486465454102, "learning_rate": 6.655878489236617e-05, "loss": 3.2436, "step": 222950 }, { "epoch": 1.9693035906674439, "grad_norm": 2.8148059844970703, "learning_rate": 6.654567224745542e-05, "loss": 3.1323, "step": 223000 }, { "epoch": 1.969745138557728, "grad_norm": 3.6999666690826416, "learning_rate": 6.653255832453181e-05, "loss": 3.123, "step": 223050 }, { "epoch": 1.9701866864480122, "grad_norm": 4.362318515777588, "learning_rate": 6.651944312460823e-05, "loss": 3.1069, "step": 223100 }, { "epoch": 1.9706282343382964, "grad_norm": 4.602596759796143, "learning_rate": 6.650632664869779e-05, "loss": 2.7696, "step": 223150 }, { "epoch": 1.9710697822285805, "grad_norm": 4.410917282104492, "learning_rate": 6.649320889781356e-05, "loss": 2.7429, "step": 223200 }, { "epoch": 1.9715113301188647, "grad_norm": 2.3495771884918213, "learning_rate": 6.648008987296878e-05, "loss": 3.1646, "step": 223250 }, { "epoch": 1.9719528780091489, "grad_norm": 4.457131385803223, "learning_rate": 6.646696957517684e-05, "loss": 3.0327, "step": 223300 }, { "epoch": 1.972394425899433, "grad_norm": 3.8774821758270264, "learning_rate": 6.645384800545112e-05, "loss": 3.4975, "step": 223350 }, { "epoch": 1.9728359737897172, "grad_norm": 1.317435622215271, "learning_rate": 6.644072516480517e-05, "loss": 3.2134, "step": 223400 }, { "epoch": 1.9732775216800014, "grad_norm": 0.6410670280456543, "learning_rate": 6.642760105425261e-05, "loss": 2.9508, "step": 223450 }, { "epoch": 1.9737190695702855, "grad_norm": 1.857003927230835, 
"learning_rate": 6.641447567480717e-05, "loss": 2.7722, "step": 223500 }, { "epoch": 1.9741606174605697, "grad_norm": 3.14223575592041, "learning_rate": 6.640134902748267e-05, "loss": 3.4729, "step": 223550 }, { "epoch": 1.9746021653508539, "grad_norm": 2.031442165374756, "learning_rate": 6.638822111329304e-05, "loss": 3.0931, "step": 223600 }, { "epoch": 1.975043713241138, "grad_norm": 1.728389024734497, "learning_rate": 6.637509193325231e-05, "loss": 3.4318, "step": 223650 }, { "epoch": 1.9754852611314222, "grad_norm": 3.0955426692962646, "learning_rate": 6.636196148837456e-05, "loss": 3.1537, "step": 223700 }, { "epoch": 1.9759268090217064, "grad_norm": 3.6021342277526855, "learning_rate": 6.634882977967406e-05, "loss": 3.1403, "step": 223750 }, { "epoch": 1.9763683569119905, "grad_norm": 4.108619213104248, "learning_rate": 6.633569680816509e-05, "loss": 3.1472, "step": 223800 }, { "epoch": 1.9768099048022747, "grad_norm": 3.5418221950531006, "learning_rate": 6.632256257486204e-05, "loss": 3.197, "step": 223850 }, { "epoch": 1.977251452692559, "grad_norm": 2.8276524543762207, "learning_rate": 6.630942708077947e-05, "loss": 3.3344, "step": 223900 }, { "epoch": 1.9776930005828433, "grad_norm": 3.096320867538452, "learning_rate": 6.629629032693195e-05, "loss": 2.9784, "step": 223950 }, { "epoch": 1.9781345484731274, "grad_norm": 1.5722672939300537, "learning_rate": 6.628315231433418e-05, "loss": 3.1677, "step": 224000 }, { "epoch": 1.9785760963634116, "grad_norm": 2.9355618953704834, "learning_rate": 6.627001304400098e-05, "loss": 2.979, "step": 224050 }, { "epoch": 1.9790176442536958, "grad_norm": 3.031902313232422, "learning_rate": 6.625687251694721e-05, "loss": 2.9146, "step": 224100 }, { "epoch": 1.97945919214398, "grad_norm": 2.6479899883270264, "learning_rate": 6.624373073418793e-05, "loss": 2.7404, "step": 224150 }, { "epoch": 1.979900740034264, "grad_norm": 2.275542974472046, "learning_rate": 6.623058769673819e-05, "loss": 3.2156, "step": 224200 }, { 
"epoch": 1.9803422879245483, "grad_norm": 1.3600300550460815, "learning_rate": 6.621744340561315e-05, "loss": 3.1979, "step": 224250 }, { "epoch": 1.9807838358148326, "grad_norm": 1.7294834852218628, "learning_rate": 6.620429786182816e-05, "loss": 3.1528, "step": 224300 }, { "epoch": 1.9812253837051168, "grad_norm": 1.5742638111114502, "learning_rate": 6.619115106639854e-05, "loss": 3.531, "step": 224350 }, { "epoch": 1.981666931595401, "grad_norm": 2.426547050476074, "learning_rate": 6.617800302033983e-05, "loss": 3.0334, "step": 224400 }, { "epoch": 1.9821084794856851, "grad_norm": 1.9069656133651733, "learning_rate": 6.616485372466756e-05, "loss": 3.4618, "step": 224450 }, { "epoch": 1.9825500273759693, "grad_norm": 4.325615882873535, "learning_rate": 6.615170318039741e-05, "loss": 3.1056, "step": 224500 }, { "epoch": 1.9829915752662535, "grad_norm": 2.732570171356201, "learning_rate": 6.613855138854514e-05, "loss": 3.4351, "step": 224550 }, { "epoch": 1.9834331231565376, "grad_norm": 3.881892681121826, "learning_rate": 6.612539835012664e-05, "loss": 3.6275, "step": 224600 }, { "epoch": 1.9838746710468218, "grad_norm": 4.55607271194458, "learning_rate": 6.611224406615786e-05, "loss": 3.1314, "step": 224650 }, { "epoch": 1.984316218937106, "grad_norm": 2.960299015045166, "learning_rate": 6.609908853765485e-05, "loss": 3.1012, "step": 224700 }, { "epoch": 1.9847577668273901, "grad_norm": 4.393443584442139, "learning_rate": 6.608593176563377e-05, "loss": 3.4316, "step": 224750 }, { "epoch": 1.9851993147176743, "grad_norm": 3.698000907897949, "learning_rate": 6.607277375111086e-05, "loss": 3.4583, "step": 224800 }, { "epoch": 1.9856408626079585, "grad_norm": 2.1111578941345215, "learning_rate": 6.605961449510248e-05, "loss": 3.3533, "step": 224850 }, { "epoch": 1.9860824104982426, "grad_norm": 2.465977668762207, "learning_rate": 6.604645399862504e-05, "loss": 3.1043, "step": 224900 }, { "epoch": 1.9865239583885268, "grad_norm": 2.8451077938079834, "learning_rate": 
6.603329226269511e-05, "loss": 3.1937, "step": 224950 }, { "epoch": 1.986965506278811, "grad_norm": 0.7063005566596985, "learning_rate": 6.602012928832932e-05, "loss": 2.9482, "step": 225000 }, { "epoch": 1.986965506278811, "eval_asr_loss": 0.9346238268861363, "eval_loss": 2.8609893321990967, "eval_runtime": 20.7029, "eval_samples_per_second": 37.096, "eval_steps_per_second": 9.274, "eval_tts_loss": 5.910735508017596, "step": 225000 }, { "epoch": 1.9874070541690951, "grad_norm": 5.629608631134033, "learning_rate": 6.600696507654439e-05, "loss": 3.2127, "step": 225050 }, { "epoch": 1.9878486020593793, "grad_norm": 2.0940353870391846, "learning_rate": 6.599379962835713e-05, "loss": 3.0928, "step": 225100 }, { "epoch": 1.9882901499496635, "grad_norm": 0.8996180891990662, "learning_rate": 6.598063294478448e-05, "loss": 3.1409, "step": 225150 }, { "epoch": 1.9887316978399476, "grad_norm": 5.146057605743408, "learning_rate": 6.596746502684345e-05, "loss": 2.87, "step": 225200 }, { "epoch": 1.9891732457302318, "grad_norm": 6.451409339904785, "learning_rate": 6.595429587555114e-05, "loss": 3.2195, "step": 225250 }, { "epoch": 1.989614793620516, "grad_norm": 1.7646028995513916, "learning_rate": 6.594112549192475e-05, "loss": 3.5659, "step": 225300 }, { "epoch": 1.9900563415108001, "grad_norm": 1.4490691423416138, "learning_rate": 6.59279538769816e-05, "loss": 3.3229, "step": 225350 }, { "epoch": 1.9904978894010843, "grad_norm": 2.59922194480896, "learning_rate": 6.591478103173909e-05, "loss": 2.8994, "step": 225400 }, { "epoch": 1.9909394372913685, "grad_norm": 1.5313228368759155, "learning_rate": 6.59016069572147e-05, "loss": 3.2554, "step": 225450 }, { "epoch": 1.9913809851816529, "grad_norm": 5.3769917488098145, "learning_rate": 6.588843165442599e-05, "loss": 3.1764, "step": 225500 }, { "epoch": 1.991822533071937, "grad_norm": 3.157618522644043, "learning_rate": 6.58752551243907e-05, "loss": 3.2877, "step": 225550 }, { "epoch": 1.9922640809622212, "grad_norm": 
1.7196130752563477, "learning_rate": 6.586207736812655e-05, "loss": 2.787, "step": 225600 }, { "epoch": 1.9927056288525054, "grad_norm": 0.6843865513801575, "learning_rate": 6.584889838665143e-05, "loss": 3.2724, "step": 225650 }, { "epoch": 1.9931471767427895, "grad_norm": 2.9632790088653564, "learning_rate": 6.583571818098331e-05, "loss": 3.3264, "step": 225700 }, { "epoch": 1.9935887246330737, "grad_norm": 2.421201705932617, "learning_rate": 6.582253675214023e-05, "loss": 2.9584, "step": 225750 }, { "epoch": 1.9940302725233578, "grad_norm": 5.2676496505737305, "learning_rate": 6.580935410114037e-05, "loss": 3.5463, "step": 225800 }, { "epoch": 1.994471820413642, "grad_norm": 1.7726913690567017, "learning_rate": 6.579617022900196e-05, "loss": 3.0083, "step": 225850 }, { "epoch": 1.9949133683039264, "grad_norm": 2.7158071994781494, "learning_rate": 6.578298513674334e-05, "loss": 3.5157, "step": 225900 }, { "epoch": 1.9953549161942106, "grad_norm": 2.830840826034546, "learning_rate": 6.576979882538297e-05, "loss": 3.3464, "step": 225950 }, { "epoch": 1.9957964640844947, "grad_norm": 2.3365275859832764, "learning_rate": 6.575687505845887e-05, "loss": 3.4291, "step": 226000 }, { "epoch": 1.996238011974779, "grad_norm": 1.796595573425293, "learning_rate": 6.574368633628195e-05, "loss": 2.8841, "step": 226050 }, { "epoch": 1.996679559865063, "grad_norm": 4.268457889556885, "learning_rate": 6.573049639803877e-05, "loss": 3.2318, "step": 226100 }, { "epoch": 1.9971211077553472, "grad_norm": 2.008795738220215, "learning_rate": 6.571730524474815e-05, "loss": 2.9898, "step": 226150 }, { "epoch": 1.9975626556456314, "grad_norm": 1.9656585454940796, "learning_rate": 6.570411287742894e-05, "loss": 3.2079, "step": 226200 }, { "epoch": 1.9980042035359156, "grad_norm": 1.4856351613998413, "learning_rate": 6.569091929710021e-05, "loss": 2.7014, "step": 226250 }, { "epoch": 1.9984457514261997, "grad_norm": 0.6530186533927917, "learning_rate": 6.5677724504781e-05, "loss": 3.0105, 
"step": 226300 }, { "epoch": 1.998887299316484, "grad_norm": 5.405598163604736, "learning_rate": 6.566452850149052e-05, "loss": 3.3306, "step": 226350 }, { "epoch": 1.999328847206768, "grad_norm": 2.492218017578125, "learning_rate": 6.565133128824805e-05, "loss": 3.5534, "step": 226400 }, { "epoch": 1.9997703950970522, "grad_norm": 3.143587350845337, "learning_rate": 6.563813286607296e-05, "loss": 2.6712, "step": 226450 }, { "epoch": 2.0002119429873364, "grad_norm": 4.805309772491455, "learning_rate": 6.562493323598471e-05, "loss": 3.1063, "step": 226500 }, { "epoch": 2.0006534908776206, "grad_norm": 3.23760986328125, "learning_rate": 6.561173239900286e-05, "loss": 3.1377, "step": 226550 }, { "epoch": 2.0010950387679047, "grad_norm": 3.0754528045654297, "learning_rate": 6.559853035614707e-05, "loss": 3.1366, "step": 226600 }, { "epoch": 2.001536586658189, "grad_norm": 3.882094144821167, "learning_rate": 6.558532710843709e-05, "loss": 3.3212, "step": 226650 }, { "epoch": 2.001978134548473, "grad_norm": 1.7975034713745117, "learning_rate": 6.557212265689273e-05, "loss": 2.9501, "step": 226700 }, { "epoch": 2.0024196824387572, "grad_norm": 4.0580549240112305, "learning_rate": 6.555891700253396e-05, "loss": 3.0891, "step": 226750 }, { "epoch": 2.0028612303290414, "grad_norm": 2.229588747024536, "learning_rate": 6.554571014638077e-05, "loss": 2.6984, "step": 226800 }, { "epoch": 2.0033027782193256, "grad_norm": 1.334570050239563, "learning_rate": 6.553250208945332e-05, "loss": 2.8494, "step": 226850 }, { "epoch": 2.0037443261096097, "grad_norm": 4.698803901672363, "learning_rate": 6.551929283277178e-05, "loss": 2.8055, "step": 226900 }, { "epoch": 2.004185873999894, "grad_norm": 2.259545087814331, "learning_rate": 6.550608237735645e-05, "loss": 3.5065, "step": 226950 }, { "epoch": 2.004627421890178, "grad_norm": 2.018728256225586, "learning_rate": 6.549287072422778e-05, "loss": 3.5027, "step": 227000 }, { "epoch": 2.0050689697804622, "grad_norm": 3.1825854778289795, 
"learning_rate": 6.547965787440621e-05, "loss": 3.5209, "step": 227050 }, { "epoch": 2.0055105176707464, "grad_norm": 2.5651297569274902, "learning_rate": 6.546644382891232e-05, "loss": 3.0002, "step": 227100 }, { "epoch": 2.0059520655610306, "grad_norm": 2.613448143005371, "learning_rate": 6.54532285887668e-05, "loss": 3.1573, "step": 227150 }, { "epoch": 2.0063936134513147, "grad_norm": 2.843972682952881, "learning_rate": 6.54400121549904e-05, "loss": 3.059, "step": 227200 }, { "epoch": 2.006835161341599, "grad_norm": 2.9624767303466797, "learning_rate": 6.542679452860404e-05, "loss": 2.7594, "step": 227250 }, { "epoch": 2.0072767092318835, "grad_norm": 2.1793372631073, "learning_rate": 6.541357571062856e-05, "loss": 3.4074, "step": 227300 }, { "epoch": 2.0077182571221677, "grad_norm": 1.0161021947860718, "learning_rate": 6.540035570208509e-05, "loss": 2.8815, "step": 227350 }, { "epoch": 2.008159805012452, "grad_norm": 4.125101089477539, "learning_rate": 6.538713450399472e-05, "loss": 2.9716, "step": 227400 }, { "epoch": 2.008601352902736, "grad_norm": 3.0919394493103027, "learning_rate": 6.537391211737868e-05, "loss": 3.2971, "step": 227450 }, { "epoch": 2.00904290079302, "grad_norm": 3.4798877239227295, "learning_rate": 6.536068854325832e-05, "loss": 3.0346, "step": 227500 }, { "epoch": 2.0094844486833043, "grad_norm": 2.032561779022217, "learning_rate": 6.534746378265502e-05, "loss": 2.8012, "step": 227550 }, { "epoch": 2.0099259965735885, "grad_norm": 4.441436290740967, "learning_rate": 6.533423783659027e-05, "loss": 3.1085, "step": 227600 }, { "epoch": 2.0103675444638727, "grad_norm": 2.3486709594726562, "learning_rate": 6.53210107060857e-05, "loss": 3.292, "step": 227650 }, { "epoch": 2.010809092354157, "grad_norm": 3.3359105587005615, "learning_rate": 6.530778239216294e-05, "loss": 3.2894, "step": 227700 }, { "epoch": 2.011250640244441, "grad_norm": 1.950713038444519, "learning_rate": 6.529455289584382e-05, "loss": 3.3267, "step": 227750 }, { "epoch": 
2.011692188134725, "grad_norm": 4.134000778198242, "learning_rate": 6.528132221815018e-05, "loss": 3.3464, "step": 227800 }, { "epoch": 2.0121337360250093, "grad_norm": 5.741815567016602, "learning_rate": 6.526809036010397e-05, "loss": 2.8544, "step": 227850 }, { "epoch": 2.0125752839152935, "grad_norm": 4.831362247467041, "learning_rate": 6.525512199502562e-05, "loss": 2.9295, "step": 227900 }, { "epoch": 2.0130168318055777, "grad_norm": 3.7244808673858643, "learning_rate": 6.52418878028967e-05, "loss": 3.2785, "step": 227950 }, { "epoch": 2.013458379695862, "grad_norm": 1.5436469316482544, "learning_rate": 6.522865243346119e-05, "loss": 3.2067, "step": 228000 }, { "epoch": 2.013458379695862, "eval_asr_loss": 0.9319613288593795, "eval_loss": 2.857133626937866, "eval_runtime": 20.3432, "eval_samples_per_second": 37.752, "eval_steps_per_second": 9.438, "eval_tts_loss": 6.035552883036707, "step": 228000 }, { "epoch": 2.013899927586146, "grad_norm": 3.8790123462677, "learning_rate": 6.521541588774139e-05, "loss": 2.8997, "step": 228050 }, { "epoch": 2.01434147547643, "grad_norm": 5.026811599731445, "learning_rate": 6.520217816675975e-05, "loss": 2.8402, "step": 228100 }, { "epoch": 2.0147830233667143, "grad_norm": 2.0185930728912354, "learning_rate": 6.518893927153876e-05, "loss": 2.6189, "step": 228150 }, { "epoch": 2.0152245712569985, "grad_norm": 1.0025949478149414, "learning_rate": 6.517569920310102e-05, "loss": 2.6175, "step": 228200 }, { "epoch": 2.0156661191472827, "grad_norm": 1.5849015712738037, "learning_rate": 6.516245796246919e-05, "loss": 3.1103, "step": 228250 }, { "epoch": 2.016107667037567, "grad_norm": 2.3523590564727783, "learning_rate": 6.514921555066604e-05, "loss": 3.604, "step": 228300 }, { "epoch": 2.016549214927851, "grad_norm": 3.9185702800750732, "learning_rate": 6.513597196871448e-05, "loss": 2.7966, "step": 228350 }, { "epoch": 2.016990762818135, "grad_norm": 1.396605372428894, "learning_rate": 6.512272721763743e-05, "loss": 3.2534, "step": 
228400 }, { "epoch": 2.0174323107084193, "grad_norm": 1.2401235103607178, "learning_rate": 6.510948129845793e-05, "loss": 2.8992, "step": 228450 }, { "epoch": 2.0178738585987035, "grad_norm": 2.9374325275421143, "learning_rate": 6.509623421219913e-05, "loss": 2.8456, "step": 228500 }, { "epoch": 2.0183154064889877, "grad_norm": 0.8914576768875122, "learning_rate": 6.508298595988425e-05, "loss": 2.9675, "step": 228550 }, { "epoch": 2.018756954379272, "grad_norm": 1.5249571800231934, "learning_rate": 6.506973654253658e-05, "loss": 3.0548, "step": 228600 }, { "epoch": 2.019198502269556, "grad_norm": 1.9931347370147705, "learning_rate": 6.505648596117956e-05, "loss": 3.2147, "step": 228650 }, { "epoch": 2.01964005015984, "grad_norm": 2.841373920440674, "learning_rate": 6.50432342168367e-05, "loss": 3.4644, "step": 228700 }, { "epoch": 2.0200815980501243, "grad_norm": 2.344156503677368, "learning_rate": 6.502998131053155e-05, "loss": 3.1103, "step": 228750 }, { "epoch": 2.0205231459404085, "grad_norm": 3.8044936656951904, "learning_rate": 6.50167272432878e-05, "loss": 3.1707, "step": 228800 }, { "epoch": 2.0209646938306927, "grad_norm": 2.157287120819092, "learning_rate": 6.500347201612919e-05, "loss": 3.36, "step": 228850 }, { "epoch": 2.0214062417209773, "grad_norm": 1.3241900205612183, "learning_rate": 6.49902156300796e-05, "loss": 2.7496, "step": 228900 }, { "epoch": 2.0218477896112614, "grad_norm": 4.144221305847168, "learning_rate": 6.497695808616296e-05, "loss": 2.9482, "step": 228950 }, { "epoch": 2.0222893375015456, "grad_norm": 2.7052125930786133, "learning_rate": 6.49636993854033e-05, "loss": 2.9198, "step": 229000 }, { "epoch": 2.0227308853918298, "grad_norm": 2.5557072162628174, "learning_rate": 6.495043952882477e-05, "loss": 3.2878, "step": 229050 }, { "epoch": 2.023172433282114, "grad_norm": 2.4937329292297363, "learning_rate": 6.493717851745157e-05, "loss": 3.0772, "step": 229100 }, { "epoch": 2.023613981172398, "grad_norm": 1.7631187438964844, 
"learning_rate": 6.492391635230797e-05, "loss": 2.9628, "step": 229150 }, { "epoch": 2.0240555290626823, "grad_norm": 2.703622341156006, "learning_rate": 6.49106530344184e-05, "loss": 3.2114, "step": 229200 }, { "epoch": 2.0244970769529664, "grad_norm": 1.6925734281539917, "learning_rate": 6.489738856480731e-05, "loss": 3.0406, "step": 229250 }, { "epoch": 2.0249386248432506, "grad_norm": 2.586226224899292, "learning_rate": 6.488412294449929e-05, "loss": 2.7732, "step": 229300 }, { "epoch": 2.0253801727335348, "grad_norm": 2.5252676010131836, "learning_rate": 6.487085617451898e-05, "loss": 2.9559, "step": 229350 }, { "epoch": 2.025821720623819, "grad_norm": 4.164633274078369, "learning_rate": 6.485758825589113e-05, "loss": 2.6783, "step": 229400 }, { "epoch": 2.026263268514103, "grad_norm": 3.092494010925293, "learning_rate": 6.484431918964058e-05, "loss": 3.1991, "step": 229450 }, { "epoch": 2.0267048164043873, "grad_norm": 2.40688419342041, "learning_rate": 6.483104897679227e-05, "loss": 3.2162, "step": 229500 }, { "epoch": 2.0271463642946714, "grad_norm": 5.002386093139648, "learning_rate": 6.481777761837116e-05, "loss": 2.9303, "step": 229550 }, { "epoch": 2.0275879121849556, "grad_norm": 1.425470232963562, "learning_rate": 6.480450511540238e-05, "loss": 2.9637, "step": 229600 }, { "epoch": 2.0280294600752398, "grad_norm": 2.9680776596069336, "learning_rate": 6.479123146891112e-05, "loss": 3.3259, "step": 229650 }, { "epoch": 2.028471007965524, "grad_norm": 5.0673041343688965, "learning_rate": 6.477795667992267e-05, "loss": 3.1311, "step": 229700 }, { "epoch": 2.028912555855808, "grad_norm": 1.1483591794967651, "learning_rate": 6.476468074946238e-05, "loss": 3.1779, "step": 229750 }, { "epoch": 2.0293541037460923, "grad_norm": 3.565528631210327, "learning_rate": 6.475140367855567e-05, "loss": 3.0531, "step": 229800 }, { "epoch": 2.0297956516363764, "grad_norm": 1.5797202587127686, "learning_rate": 6.473812546822815e-05, "loss": 3.2312, "step": 229850 }, { 
"epoch": 2.0302371995266606, "grad_norm": 3.3140475749969482, "learning_rate": 6.47248461195054e-05, "loss": 3.2321, "step": 229900 }, { "epoch": 2.0306787474169448, "grad_norm": 4.104597568511963, "learning_rate": 6.471156563341317e-05, "loss": 3.2755, "step": 229950 }, { "epoch": 2.031120295307229, "grad_norm": 2.909299850463867, "learning_rate": 6.469828401097722e-05, "loss": 2.8798, "step": 230000 }, { "epoch": 2.031561843197513, "grad_norm": 1.7076945304870605, "learning_rate": 6.468500125322346e-05, "loss": 3.3038, "step": 230050 }, { "epoch": 2.0320033910877973, "grad_norm": 4.370232582092285, "learning_rate": 6.467171736117791e-05, "loss": 2.9408, "step": 230100 }, { "epoch": 2.0324449389780814, "grad_norm": 2.5762276649475098, "learning_rate": 6.465843233586657e-05, "loss": 2.9553, "step": 230150 }, { "epoch": 2.0328864868683656, "grad_norm": 3.14599347114563, "learning_rate": 6.464514617831567e-05, "loss": 3.0772, "step": 230200 }, { "epoch": 2.0333280347586498, "grad_norm": 2.719356060028076, "learning_rate": 6.463185888955138e-05, "loss": 2.985, "step": 230250 }, { "epoch": 2.033769582648934, "grad_norm": 3.8772943019866943, "learning_rate": 6.461857047060009e-05, "loss": 3.143, "step": 230300 }, { "epoch": 2.034211130539218, "grad_norm": 2.380610704421997, "learning_rate": 6.460528092248819e-05, "loss": 2.863, "step": 230350 }, { "epoch": 2.0346526784295023, "grad_norm": 4.021361827850342, "learning_rate": 6.459199024624219e-05, "loss": 2.7805, "step": 230400 }, { "epoch": 2.0350942263197864, "grad_norm": 3.375150442123413, "learning_rate": 6.457869844288867e-05, "loss": 2.8781, "step": 230450 }, { "epoch": 2.035535774210071, "grad_norm": 6.324542045593262, "learning_rate": 6.456540551345431e-05, "loss": 2.6493, "step": 230500 }, { "epoch": 2.035977322100355, "grad_norm": 2.199507474899292, "learning_rate": 6.455211145896589e-05, "loss": 3.4413, "step": 230550 }, { "epoch": 2.0364188699906394, "grad_norm": 1.475187063217163, "learning_rate": 
6.453881628045028e-05, "loss": 3.3786, "step": 230600 }, { "epoch": 2.0368604178809235, "grad_norm": 1.968745470046997, "learning_rate": 6.452551997893438e-05, "loss": 3.3907, "step": 230650 }, { "epoch": 2.0373019657712077, "grad_norm": 3.9331090450286865, "learning_rate": 6.451222255544523e-05, "loss": 3.0484, "step": 230700 }, { "epoch": 2.037743513661492, "grad_norm": 4.076101779937744, "learning_rate": 6.449892401100996e-05, "loss": 3.0525, "step": 230750 }, { "epoch": 2.038185061551776, "grad_norm": 1.4716242551803589, "learning_rate": 6.448562434665575e-05, "loss": 3.0944, "step": 230800 }, { "epoch": 2.03862660944206, "grad_norm": 3.958530902862549, "learning_rate": 6.447232356340991e-05, "loss": 3.3774, "step": 230850 }, { "epoch": 2.0390681573323444, "grad_norm": 3.1192853450775146, "learning_rate": 6.445902166229978e-05, "loss": 3.4414, "step": 230900 }, { "epoch": 2.0395097052226285, "grad_norm": 1.0482338666915894, "learning_rate": 6.444571864435283e-05, "loss": 3.15, "step": 230950 }, { "epoch": 2.0399512531129127, "grad_norm": 1.4265995025634766, "learning_rate": 6.443241451059662e-05, "loss": 2.9373, "step": 231000 }, { "epoch": 2.0399512531129127, "eval_asr_loss": 0.9325178723528175, "eval_loss": 2.8670003414154053, "eval_runtime": 20.813, "eval_samples_per_second": 36.9, "eval_steps_per_second": 9.225, "eval_tts_loss": 6.1085950902518045, "step": 231000 }, { "epoch": 2.040392801003197, "grad_norm": 1.4011311531066895, "learning_rate": 6.441910926205877e-05, "loss": 2.8658, "step": 231050 }, { "epoch": 2.040834348893481, "grad_norm": 1.7842819690704346, "learning_rate": 6.440580289976701e-05, "loss": 2.9198, "step": 231100 }, { "epoch": 2.041275896783765, "grad_norm": 1.5047529935836792, "learning_rate": 6.439249542474912e-05, "loss": 2.9665, "step": 231150 }, { "epoch": 2.0417174446740494, "grad_norm": 1.1403131484985352, "learning_rate": 6.4379186838033e-05, "loss": 2.9217, "step": 231200 }, { "epoch": 2.0421589925643335, "grad_norm": 
2.5434775352478027, "learning_rate": 6.436587714064665e-05, "loss": 3.1017, "step": 231250 }, { "epoch": 2.0426005404546177, "grad_norm": 4.1038312911987305, "learning_rate": 6.43525663336181e-05, "loss": 3.3316, "step": 231300 }, { "epoch": 2.043042088344902, "grad_norm": 3.0226967334747314, "learning_rate": 6.433925441797549e-05, "loss": 2.7633, "step": 231350 }, { "epoch": 2.043483636235186, "grad_norm": 2.997720718383789, "learning_rate": 6.432594139474709e-05, "loss": 2.9086, "step": 231400 }, { "epoch": 2.04392518412547, "grad_norm": 1.9566609859466553, "learning_rate": 6.43126272649612e-05, "loss": 2.9525, "step": 231450 }, { "epoch": 2.0443667320157544, "grad_norm": 2.1925888061523438, "learning_rate": 6.429931202964621e-05, "loss": 3.3177, "step": 231500 }, { "epoch": 2.0448082799060385, "grad_norm": 3.2015256881713867, "learning_rate": 6.428599568983062e-05, "loss": 2.914, "step": 231550 }, { "epoch": 2.0452498277963227, "grad_norm": 3.4477548599243164, "learning_rate": 6.427267824654299e-05, "loss": 3.3198, "step": 231600 }, { "epoch": 2.045691375686607, "grad_norm": 2.724602222442627, "learning_rate": 6.425935970081203e-05, "loss": 2.8969, "step": 231650 }, { "epoch": 2.046132923576891, "grad_norm": 3.4831771850585938, "learning_rate": 6.424604005366642e-05, "loss": 3.0436, "step": 231700 }, { "epoch": 2.046574471467175, "grad_norm": 2.3419885635375977, "learning_rate": 6.423271930613503e-05, "loss": 3.4779, "step": 231750 }, { "epoch": 2.0470160193574594, "grad_norm": 2.4602243900299072, "learning_rate": 6.421939745924677e-05, "loss": 2.6744, "step": 231800 }, { "epoch": 2.0474575672477435, "grad_norm": 6.231406211853027, "learning_rate": 6.420607451403062e-05, "loss": 2.7832, "step": 231850 }, { "epoch": 2.0478991151380277, "grad_norm": 1.2035382986068726, "learning_rate": 6.419275047151569e-05, "loss": 3.0244, "step": 231900 }, { "epoch": 2.048340663028312, "grad_norm": 4.7911481857299805, "learning_rate": 6.417942533273111e-05, "loss": 2.9757, 
"step": 231950 }, { "epoch": 2.048782210918596, "grad_norm": 3.543363332748413, "learning_rate": 6.416609909870619e-05, "loss": 2.6808, "step": 232000 }, { "epoch": 2.04922375880888, "grad_norm": 2.8009135723114014, "learning_rate": 6.415277177047025e-05, "loss": 2.9496, "step": 232050 }, { "epoch": 2.049665306699165, "grad_norm": 4.706131458282471, "learning_rate": 6.413944334905268e-05, "loss": 2.9122, "step": 232100 }, { "epoch": 2.050106854589449, "grad_norm": 3.3189642429351807, "learning_rate": 6.412611383548301e-05, "loss": 3.0313, "step": 232150 }, { "epoch": 2.050548402479733, "grad_norm": 3.766918182373047, "learning_rate": 6.411304985357103e-05, "loss": 3.2675, "step": 232200 }, { "epoch": 2.0509899503700173, "grad_norm": 1.4554075002670288, "learning_rate": 6.409971818057778e-05, "loss": 3.2086, "step": 232250 }, { "epoch": 2.0514314982603015, "grad_norm": 5.887540340423584, "learning_rate": 6.408638541850088e-05, "loss": 2.8141, "step": 232300 }, { "epoch": 2.0518730461505856, "grad_norm": 4.158108711242676, "learning_rate": 6.407305156837013e-05, "loss": 3.099, "step": 232350 }, { "epoch": 2.05231459404087, "grad_norm": 2.646940231323242, "learning_rate": 6.40597166312155e-05, "loss": 3.4103, "step": 232400 }, { "epoch": 2.052756141931154, "grad_norm": 1.4084917306900024, "learning_rate": 6.4046380608067e-05, "loss": 3.0681, "step": 232450 }, { "epoch": 2.053197689821438, "grad_norm": 3.3093457221984863, "learning_rate": 6.403304349995468e-05, "loss": 3.1133, "step": 232500 }, { "epoch": 2.0536392377117223, "grad_norm": 7.799812316894531, "learning_rate": 6.401970530790877e-05, "loss": 3.3527, "step": 232550 }, { "epoch": 2.0540807856020065, "grad_norm": 4.245431900024414, "learning_rate": 6.40063660329595e-05, "loss": 2.9631, "step": 232600 }, { "epoch": 2.0545223334922906, "grad_norm": 2.25030517578125, "learning_rate": 6.399302567613721e-05, "loss": 3.3969, "step": 232650 }, { "epoch": 2.054963881382575, "grad_norm": 1.1634451150894165, 
"learning_rate": 6.397968423847236e-05, "loss": 3.495, "step": 232700 }, { "epoch": 2.055405429272859, "grad_norm": 2.125120162963867, "learning_rate": 6.396634172099544e-05, "loss": 2.9466, "step": 232750 }, { "epoch": 2.055846977163143, "grad_norm": 2.8387858867645264, "learning_rate": 6.395299812473705e-05, "loss": 2.7765, "step": 232800 }, { "epoch": 2.0562885250534273, "grad_norm": 3.0474061965942383, "learning_rate": 6.39396534507279e-05, "loss": 3.199, "step": 232850 }, { "epoch": 2.0567300729437115, "grad_norm": 5.258384704589844, "learning_rate": 6.392630769999868e-05, "loss": 3.1556, "step": 232900 }, { "epoch": 2.0571716208339956, "grad_norm": 2.827653169631958, "learning_rate": 6.391296087358032e-05, "loss": 2.6953, "step": 232950 }, { "epoch": 2.05761316872428, "grad_norm": 2.8440933227539062, "learning_rate": 6.389961297250368e-05, "loss": 3.2708, "step": 233000 }, { "epoch": 2.058054716614564, "grad_norm": 3.5404655933380127, "learning_rate": 6.388626399779982e-05, "loss": 3.0693, "step": 233050 }, { "epoch": 2.058496264504848, "grad_norm": 1.7443832159042358, "learning_rate": 6.387291395049982e-05, "loss": 3.4108, "step": 233100 }, { "epoch": 2.0589378123951323, "grad_norm": 5.929326057434082, "learning_rate": 6.385956283163485e-05, "loss": 3.2485, "step": 233150 }, { "epoch": 2.0593793602854165, "grad_norm": 1.6079530715942383, "learning_rate": 6.38462106422362e-05, "loss": 3.4657, "step": 233200 }, { "epoch": 2.0598209081757006, "grad_norm": 3.8741631507873535, "learning_rate": 6.383285738333515e-05, "loss": 2.5408, "step": 233250 }, { "epoch": 2.060262456065985, "grad_norm": 3.9816033840179443, "learning_rate": 6.38195030559632e-05, "loss": 3.1404, "step": 233300 }, { "epoch": 2.060704003956269, "grad_norm": 1.1695952415466309, "learning_rate": 6.380614766115181e-05, "loss": 3.1305, "step": 233350 }, { "epoch": 2.061145551846553, "grad_norm": 1.1502101421356201, "learning_rate": 6.379279119993259e-05, "loss": 2.7873, "step": 233400 }, { "epoch": 
2.0615870997368373, "grad_norm": 2.502312660217285, "learning_rate": 6.37794336733372e-05, "loss": 2.8025, "step": 233450 }, { "epoch": 2.0620286476271215, "grad_norm": 2.5723695755004883, "learning_rate": 6.376607508239742e-05, "loss": 3.1278, "step": 233500 }, { "epoch": 2.0624701955174056, "grad_norm": 1.911758542060852, "learning_rate": 6.375271542814507e-05, "loss": 3.3527, "step": 233550 }, { "epoch": 2.06291174340769, "grad_norm": 3.6591827869415283, "learning_rate": 6.373935471161206e-05, "loss": 2.9983, "step": 233600 }, { "epoch": 2.0633532912979744, "grad_norm": 1.4186557531356812, "learning_rate": 6.372599293383042e-05, "loss": 3.1304, "step": 233650 }, { "epoch": 2.0637948391882586, "grad_norm": 3.396662950515747, "learning_rate": 6.371263009583222e-05, "loss": 3.1124, "step": 233700 }, { "epoch": 2.0642363870785427, "grad_norm": 3.51399302482605, "learning_rate": 6.36992661986496e-05, "loss": 3.1643, "step": 233750 }, { "epoch": 2.064677934968827, "grad_norm": 1.7544376850128174, "learning_rate": 6.368590124331486e-05, "loss": 3.1829, "step": 233800 }, { "epoch": 2.065119482859111, "grad_norm": 3.821058750152588, "learning_rate": 6.367253523086028e-05, "loss": 2.7627, "step": 233850 }, { "epoch": 2.0655610307493952, "grad_norm": 3.183556079864502, "learning_rate": 6.365916816231832e-05, "loss": 2.8705, "step": 233900 }, { "epoch": 2.0660025786396794, "grad_norm": 3.5835275650024414, "learning_rate": 6.364580003872142e-05, "loss": 2.798, "step": 233950 }, { "epoch": 2.0664441265299636, "grad_norm": 2.4540653228759766, "learning_rate": 6.36324308611022e-05, "loss": 2.8802, "step": 234000 }, { "epoch": 2.0664441265299636, "eval_asr_loss": 0.9346235701288164, "eval_loss": 2.8734447956085205, "eval_runtime": 20.7109, "eval_samples_per_second": 37.082, "eval_steps_per_second": 9.27, "eval_tts_loss": 6.045795924074517, "step": 234000 }, { "epoch": 2.0668856744202477, "grad_norm": 1.7151188850402832, "learning_rate": 6.361906063049328e-05, "loss": 3.08, 
"step": 234050 }, { "epoch": 2.067327222310532, "grad_norm": 3.399620771408081, "learning_rate": 6.360568934792743e-05, "loss": 3.4007, "step": 234100 }, { "epoch": 2.067768770200816, "grad_norm": 2.422654151916504, "learning_rate": 6.359231701443742e-05, "loss": 3.1204, "step": 234150 }, { "epoch": 2.0682103180911002, "grad_norm": 4.625797748565674, "learning_rate": 6.357894363105621e-05, "loss": 2.9898, "step": 234200 }, { "epoch": 2.0686518659813844, "grad_norm": 1.8986616134643555, "learning_rate": 6.356556919881673e-05, "loss": 2.9439, "step": 234250 }, { "epoch": 2.0690934138716686, "grad_norm": 1.4936383962631226, "learning_rate": 6.355219371875208e-05, "loss": 2.9849, "step": 234300 }, { "epoch": 2.0695349617619527, "grad_norm": 3.0682079792022705, "learning_rate": 6.353881719189538e-05, "loss": 3.2306, "step": 234350 }, { "epoch": 2.069976509652237, "grad_norm": 1.3556572198867798, "learning_rate": 6.352570718097391e-05, "loss": 3.268, "step": 234400 }, { "epoch": 2.070418057542521, "grad_norm": 1.753355860710144, "learning_rate": 6.351232858451726e-05, "loss": 3.4784, "step": 234450 }, { "epoch": 2.0708596054328052, "grad_norm": 5.882805347442627, "learning_rate": 6.349894894434779e-05, "loss": 2.8673, "step": 234500 }, { "epoch": 2.0713011533230894, "grad_norm": 3.6827399730682373, "learning_rate": 6.348556826149898e-05, "loss": 3.0578, "step": 234550 }, { "epoch": 2.0717427012133736, "grad_norm": 6.412780284881592, "learning_rate": 6.34721865370044e-05, "loss": 3.0419, "step": 234600 }, { "epoch": 2.0721842491036577, "grad_norm": 1.9119713306427002, "learning_rate": 6.345880377189763e-05, "loss": 3.4086, "step": 234650 }, { "epoch": 2.072625796993942, "grad_norm": 2.772346258163452, "learning_rate": 6.34454199672124e-05, "loss": 3.0575, "step": 234700 }, { "epoch": 2.073067344884226, "grad_norm": 4.332114219665527, "learning_rate": 6.34320351239825e-05, "loss": 3.135, "step": 234750 }, { "epoch": 2.0735088927745102, "grad_norm": 2.814565658569336, 
"learning_rate": 6.341864924324178e-05, "loss": 3.1281, "step": 234800 }, { "epoch": 2.0739504406647944, "grad_norm": 6.352789402008057, "learning_rate": 6.340526232602419e-05, "loss": 3.1765, "step": 234850 }, { "epoch": 2.0743919885550786, "grad_norm": 4.205619812011719, "learning_rate": 6.339187437336376e-05, "loss": 2.7536, "step": 234900 }, { "epoch": 2.0748335364453627, "grad_norm": 2.4051403999328613, "learning_rate": 6.337848538629458e-05, "loss": 2.8179, "step": 234950 }, { "epoch": 2.075275084335647, "grad_norm": 2.8886311054229736, "learning_rate": 6.336509536585089e-05, "loss": 2.9021, "step": 235000 }, { "epoch": 2.075716632225931, "grad_norm": 3.565911054611206, "learning_rate": 6.335170431306689e-05, "loss": 2.9569, "step": 235050 }, { "epoch": 2.0761581801162152, "grad_norm": 3.9193966388702393, "learning_rate": 6.333831222897695e-05, "loss": 3.364, "step": 235100 }, { "epoch": 2.0765997280064994, "grad_norm": 3.124962091445923, "learning_rate": 6.33249191146155e-05, "loss": 3.3793, "step": 235150 }, { "epoch": 2.0770412758967836, "grad_norm": 2.5896620750427246, "learning_rate": 6.331152497101705e-05, "loss": 3.0124, "step": 235200 }, { "epoch": 2.0774828237870677, "grad_norm": 1.9518678188323975, "learning_rate": 6.329812979921615e-05, "loss": 3.1021, "step": 235250 }, { "epoch": 2.0779243716773523, "grad_norm": 1.719233512878418, "learning_rate": 6.32847336002475e-05, "loss": 3.1602, "step": 235300 }, { "epoch": 2.0783659195676365, "grad_norm": 2.6694281101226807, "learning_rate": 6.327133637514583e-05, "loss": 2.9971, "step": 235350 }, { "epoch": 2.0788074674579207, "grad_norm": 3.187509775161743, "learning_rate": 6.325793812494598e-05, "loss": 2.6088, "step": 235400 }, { "epoch": 2.079249015348205, "grad_norm": 4.0148606300354, "learning_rate": 6.32445388506828e-05, "loss": 3.0347, "step": 235450 }, { "epoch": 2.079690563238489, "grad_norm": 2.926313877105713, "learning_rate": 6.323113855339134e-05, "loss": 3.0781, "step": 235500 }, { "epoch": 
2.080132111128773, "grad_norm": 1.753333568572998, "learning_rate": 6.32177372341066e-05, "loss": 2.9386, "step": 235550 }, { "epoch": 2.0805736590190573, "grad_norm": 2.3865480422973633, "learning_rate": 6.320433489386375e-05, "loss": 3.2847, "step": 235600 }, { "epoch": 2.0810152069093415, "grad_norm": 3.722965717315674, "learning_rate": 6.3190931533698e-05, "loss": 2.9328, "step": 235650 }, { "epoch": 2.0814567547996257, "grad_norm": 2.7150113582611084, "learning_rate": 6.317752715464464e-05, "loss": 3.2807, "step": 235700 }, { "epoch": 2.08189830268991, "grad_norm": 2.8550362586975098, "learning_rate": 6.316412175773904e-05, "loss": 3.0331, "step": 235750 }, { "epoch": 2.082339850580194, "grad_norm": 3.4029903411865234, "learning_rate": 6.315071534401669e-05, "loss": 3.0143, "step": 235800 }, { "epoch": 2.082781398470478, "grad_norm": 2.1413564682006836, "learning_rate": 6.313730791451306e-05, "loss": 3.216, "step": 235850 }, { "epoch": 2.0832229463607623, "grad_norm": 3.0733673572540283, "learning_rate": 6.312389947026383e-05, "loss": 3.28, "step": 235900 }, { "epoch": 2.0836644942510465, "grad_norm": 4.860799789428711, "learning_rate": 6.311049001230463e-05, "loss": 3.0566, "step": 235950 }, { "epoch": 2.0841060421413307, "grad_norm": 1.6133939027786255, "learning_rate": 6.309707954167126e-05, "loss": 2.8544, "step": 236000 }, { "epoch": 2.084547590031615, "grad_norm": 3.4890334606170654, "learning_rate": 6.308366805939955e-05, "loss": 3.4803, "step": 236050 }, { "epoch": 2.084989137921899, "grad_norm": 5.022340297698975, "learning_rate": 6.307025556652543e-05, "loss": 2.8892, "step": 236100 }, { "epoch": 2.085430685812183, "grad_norm": 2.1764464378356934, "learning_rate": 6.305684206408491e-05, "loss": 2.8681, "step": 236150 }, { "epoch": 2.0858722337024673, "grad_norm": 4.524923801422119, "learning_rate": 6.304342755311406e-05, "loss": 2.9398, "step": 236200 }, { "epoch": 2.0863137815927515, "grad_norm": 2.636495590209961, "learning_rate": 
6.3030012034649e-05, "loss": 2.9677, "step": 236250 }, { "epoch": 2.0867553294830357, "grad_norm": 2.2745394706726074, "learning_rate": 6.301659550972604e-05, "loss": 3.0237, "step": 236300 }, { "epoch": 2.08719687737332, "grad_norm": 6.002178192138672, "learning_rate": 6.300317797938144e-05, "loss": 2.7733, "step": 236350 }, { "epoch": 2.087638425263604, "grad_norm": null, "learning_rate": 6.299002782518247e-05, "loss": 3.1529, "step": 236400 }, { "epoch": 2.088079973153888, "grad_norm": 5.430758953094482, "learning_rate": 6.29766083071607e-05, "loss": 2.8299, "step": 236450 }, { "epoch": 2.0885215210441723, "grad_norm": 2.8759422302246094, "learning_rate": 6.296318778680596e-05, "loss": 2.7195, "step": 236500 }, { "epoch": 2.0889630689344565, "grad_norm": 1.781813144683838, "learning_rate": 6.29497662651549e-05, "loss": 3.2773, "step": 236550 }, { "epoch": 2.0894046168247407, "grad_norm": 6.49731969833374, "learning_rate": 6.293634374324419e-05, "loss": 3.0205, "step": 236600 }, { "epoch": 2.089846164715025, "grad_norm": 2.8632984161376953, "learning_rate": 6.292292022211063e-05, "loss": 3.0186, "step": 236650 }, { "epoch": 2.090287712605309, "grad_norm": 2.556323289871216, "learning_rate": 6.290949570279107e-05, "loss": 2.7086, "step": 236700 }, { "epoch": 2.090729260495593, "grad_norm": 3.0288796424865723, "learning_rate": 6.289607018632244e-05, "loss": 3.0225, "step": 236750 }, { "epoch": 2.0911708083858773, "grad_norm": 3.256965160369873, "learning_rate": 6.288264367374173e-05, "loss": 3.4244, "step": 236800 }, { "epoch": 2.091612356276162, "grad_norm": 8.172772407531738, "learning_rate": 6.286921616608606e-05, "loss": 3.2433, "step": 236850 }, { "epoch": 2.092053904166446, "grad_norm": 3.213986873626709, "learning_rate": 6.285578766439255e-05, "loss": 3.3647, "step": 236900 }, { "epoch": 2.0924954520567303, "grad_norm": 6.556624412536621, "learning_rate": 6.284235816969847e-05, "loss": 3.0929, "step": 236950 }, { "epoch": 2.0929369999470144, "grad_norm": 
4.544995307922363, "learning_rate": 6.282892768304112e-05, "loss": 2.7749, "step": 237000 }, { "epoch": 2.0929369999470144, "eval_asr_loss": 0.9273501776573921, "eval_loss": 2.862983465194702, "eval_runtime": 20.8111, "eval_samples_per_second": 36.903, "eval_steps_per_second": 9.226, "eval_tts_loss": 6.039148560531288, "step": 237000 }, { "epoch": 2.0933785478372986, "grad_norm": 2.387843608856201, "learning_rate": 6.281549620545788e-05, "loss": 3.3171, "step": 237050 }, { "epoch": 2.0938200957275828, "grad_norm": 1.6893880367279053, "learning_rate": 6.280206373798624e-05, "loss": 3.3243, "step": 237100 }, { "epoch": 2.094261643617867, "grad_norm": 2.2369627952575684, "learning_rate": 6.27888989604742e-05, "loss": 2.6777, "step": 237150 }, { "epoch": 2.094703191508151, "grad_norm": 3.7619330883026123, "learning_rate": 6.277546453608453e-05, "loss": 2.8815, "step": 237200 }, { "epoch": 2.0951447393984353, "grad_norm": 2.0962555408477783, "learning_rate": 6.276202912489859e-05, "loss": 3.2917, "step": 237250 }, { "epoch": 2.0955862872887194, "grad_norm": 1.178427815437317, "learning_rate": 6.274859272795408e-05, "loss": 2.746, "step": 237300 }, { "epoch": 2.0960278351790036, "grad_norm": 1.9152957201004028, "learning_rate": 6.273515534628891e-05, "loss": 3.2423, "step": 237350 }, { "epoch": 2.0964693830692878, "grad_norm": 2.9031455516815186, "learning_rate": 6.272171698094098e-05, "loss": 3.0746, "step": 237400 }, { "epoch": 2.096910930959572, "grad_norm": 2.8071396350860596, "learning_rate": 6.270827763294828e-05, "loss": 3.2411, "step": 237450 }, { "epoch": 2.097352478849856, "grad_norm": 4.363584041595459, "learning_rate": 6.269483730334891e-05, "loss": 3.1862, "step": 237500 }, { "epoch": 2.0977940267401403, "grad_norm": 1.6039600372314453, "learning_rate": 6.268139599318099e-05, "loss": 3.4092, "step": 237550 }, { "epoch": 2.0982355746304244, "grad_norm": 5.065496921539307, "learning_rate": 6.266795370348278e-05, "loss": 2.8442, "step": 237600 }, { "epoch": 
2.0986771225207086, "grad_norm": 3.6026055812835693, "learning_rate": 6.265451043529256e-05, "loss": 3.2712, "step": 237650 }, { "epoch": 2.0991186704109928, "grad_norm": 3.2399210929870605, "learning_rate": 6.264106618964872e-05, "loss": 3.0832, "step": 237700 }, { "epoch": 2.099560218301277, "grad_norm": 3.737478494644165, "learning_rate": 6.26276209675897e-05, "loss": 3.2228, "step": 237750 }, { "epoch": 2.100001766191561, "grad_norm": 1.1522648334503174, "learning_rate": 6.261417477015404e-05, "loss": 3.1848, "step": 237800 }, { "epoch": 2.1004433140818453, "grad_norm": 3.852766513824463, "learning_rate": 6.260072759838035e-05, "loss": 2.6, "step": 237850 }, { "epoch": 2.1008848619721294, "grad_norm": 3.893470287322998, "learning_rate": 6.258727945330731e-05, "loss": 2.656, "step": 237900 }, { "epoch": 2.1013264098624136, "grad_norm": 2.8423218727111816, "learning_rate": 6.257383033597367e-05, "loss": 2.7779, "step": 237950 }, { "epoch": 2.1017679577526978, "grad_norm": 7.985106468200684, "learning_rate": 6.256038024741827e-05, "loss": 2.403, "step": 238000 }, { "epoch": 2.102209505642982, "grad_norm": 1.0427534580230713, "learning_rate": 6.254692918867997e-05, "loss": 3.2697, "step": 238050 }, { "epoch": 2.102651053533266, "grad_norm": 3.9906439781188965, "learning_rate": 6.25334771607978e-05, "loss": 3.2458, "step": 238100 }, { "epoch": 2.1030926014235503, "grad_norm": 3.478618860244751, "learning_rate": 6.252002416481082e-05, "loss": 3.1903, "step": 238150 }, { "epoch": 2.1035341493138344, "grad_norm": 4.565757751464844, "learning_rate": 6.250657020175811e-05, "loss": 3.3304, "step": 238200 }, { "epoch": 2.1039756972041186, "grad_norm": 4.789504051208496, "learning_rate": 6.249311527267892e-05, "loss": 3.4375, "step": 238250 }, { "epoch": 2.1044172450944028, "grad_norm": 2.925300121307373, "learning_rate": 6.247965937861251e-05, "loss": 3.4789, "step": 238300 }, { "epoch": 2.104858792984687, "grad_norm": 2.9117133617401123, "learning_rate": 
6.246620252059824e-05, "loss": 3.0623, "step": 238350 }, { "epoch": 2.105300340874971, "grad_norm": 0.9601924419403076, "learning_rate": 6.245274469967553e-05, "loss": 3.3377, "step": 238400 }, { "epoch": 2.1057418887652553, "grad_norm": 3.297055959701538, "learning_rate": 6.24392859168839e-05, "loss": 2.9941, "step": 238450 }, { "epoch": 2.10618343665554, "grad_norm": 2.3726651668548584, "learning_rate": 6.24258261732629e-05, "loss": 3.4003, "step": 238500 }, { "epoch": 2.106624984545824, "grad_norm": 2.375788927078247, "learning_rate": 6.241236546985221e-05, "loss": 3.079, "step": 238550 }, { "epoch": 2.107066532436108, "grad_norm": 0.5383282899856567, "learning_rate": 6.239890380769154e-05, "loss": 2.3831, "step": 238600 }, { "epoch": 2.1075080803263924, "grad_norm": 3.9929215908050537, "learning_rate": 6.238544118782069e-05, "loss": 3.3199, "step": 238650 }, { "epoch": 2.1079496282166765, "grad_norm": 2.6869723796844482, "learning_rate": 6.23719776112795e-05, "loss": 3.6369, "step": 238700 }, { "epoch": 2.1083911761069607, "grad_norm": 3.927208662033081, "learning_rate": 6.2358513079108e-05, "loss": 3.2033, "step": 238750 }, { "epoch": 2.108832723997245, "grad_norm": 2.8071539402008057, "learning_rate": 6.234504759234613e-05, "loss": 3.4232, "step": 238800 }, { "epoch": 2.109274271887529, "grad_norm": 1.4641625881195068, "learning_rate": 6.233158115203403e-05, "loss": 2.7924, "step": 238850 }, { "epoch": 2.109715819777813, "grad_norm": 1.6354135274887085, "learning_rate": 6.231811375921184e-05, "loss": 2.7455, "step": 238900 }, { "epoch": 2.1101573676680974, "grad_norm": 2.952073574066162, "learning_rate": 6.230464541491985e-05, "loss": 3.3115, "step": 238950 }, { "epoch": 2.1105989155583815, "grad_norm": 1.168439507484436, "learning_rate": 6.229117612019832e-05, "loss": 2.6336, "step": 239000 }, { "epoch": 2.1110404634486657, "grad_norm": 2.487980842590332, "learning_rate": 6.227770587608767e-05, "loss": 2.9631, "step": 239050 }, { "epoch": 2.11148201133895, 
"grad_norm": 3.3226206302642822, "learning_rate": 6.226423468362834e-05, "loss": 3.1342, "step": 239100 }, { "epoch": 2.111923559229234, "grad_norm": 1.275766372680664, "learning_rate": 6.225076254386088e-05, "loss": 2.7905, "step": 239150 }, { "epoch": 2.112365107119518, "grad_norm": 3.030874252319336, "learning_rate": 6.223728945782591e-05, "loss": 2.9667, "step": 239200 }, { "epoch": 2.1128066550098024, "grad_norm": 4.319136142730713, "learning_rate": 6.222381542656408e-05, "loss": 3.2937, "step": 239250 }, { "epoch": 2.1132482029000865, "grad_norm": 2.6660947799682617, "learning_rate": 6.221034045111617e-05, "loss": 3.058, "step": 239300 }, { "epoch": 2.1136897507903707, "grad_norm": 3.479022741317749, "learning_rate": 6.2196864532523e-05, "loss": 3.3027, "step": 239350 }, { "epoch": 2.114131298680655, "grad_norm": 1.8305370807647705, "learning_rate": 6.218338767182548e-05, "loss": 2.9988, "step": 239400 }, { "epoch": 2.114572846570939, "grad_norm": 2.012442111968994, "learning_rate": 6.216990987006457e-05, "loss": 3.2906, "step": 239450 }, { "epoch": 2.115014394461223, "grad_norm": 1.7256959676742554, "learning_rate": 6.215643112828133e-05, "loss": 2.9947, "step": 239500 }, { "epoch": 2.1154559423515074, "grad_norm": 2.1054117679595947, "learning_rate": 6.214295144751684e-05, "loss": 2.5548, "step": 239550 }, { "epoch": 2.1158974902417915, "grad_norm": 3.857567071914673, "learning_rate": 6.212947082881234e-05, "loss": 2.6859, "step": 239600 }, { "epoch": 2.1163390381320757, "grad_norm": 3.1762421131134033, "learning_rate": 6.211598927320907e-05, "loss": 2.8899, "step": 239650 }, { "epoch": 2.11678058602236, "grad_norm": 4.7173237800598145, "learning_rate": 6.210250678174837e-05, "loss": 3.0474, "step": 239700 }, { "epoch": 2.117222133912644, "grad_norm": 1.7269694805145264, "learning_rate": 6.208902335547166e-05, "loss": 3.3108, "step": 239750 }, { "epoch": 2.117663681802928, "grad_norm": 4.368720054626465, "learning_rate": 6.207553899542039e-05, "loss": 
3.0016, "step": 239800 }, { "epoch": 2.1181052296932124, "grad_norm": 2.5750601291656494, "learning_rate": 6.206205370263612e-05, "loss": 2.912, "step": 239850 }, { "epoch": 2.1185467775834965, "grad_norm": 4.731289386749268, "learning_rate": 6.204856747816052e-05, "loss": 2.9171, "step": 239900 }, { "epoch": 2.1189883254737807, "grad_norm": 1.6712313890457153, "learning_rate": 6.203508032303524e-05, "loss": 2.8296, "step": 239950 }, { "epoch": 2.119429873364065, "grad_norm": 0.9750666618347168, "learning_rate": 6.202159223830204e-05, "loss": 3.4434, "step": 240000 }, { "epoch": 2.119429873364065, "eval_asr_loss": 0.9326350055396521, "eval_loss": 2.8607914447784424, "eval_runtime": 20.6006, "eval_samples_per_second": 37.28, "eval_steps_per_second": 9.32, "eval_tts_loss": 6.059072890020006, "step": 240000 }, { "epoch": 2.1198714212543495, "grad_norm": 5.349383354187012, "learning_rate": 6.200810322500278e-05, "loss": 3.0129, "step": 240050 }, { "epoch": 2.1203129691446336, "grad_norm": 3.87054443359375, "learning_rate": 6.19946132841794e-05, "loss": 3.0981, "step": 240100 }, { "epoch": 2.120754517034918, "grad_norm": 2.3254106044769287, "learning_rate": 6.198112241687383e-05, "loss": 3.0004, "step": 240150 }, { "epoch": 2.121196064925202, "grad_norm": 4.357524871826172, "learning_rate": 6.196763062412816e-05, "loss": 2.987, "step": 240200 }, { "epoch": 2.121637612815486, "grad_norm": 1.6933478116989136, "learning_rate": 6.19541379069845e-05, "loss": 2.9078, "step": 240250 }, { "epoch": 2.1220791607057703, "grad_norm": 1.5506831407546997, "learning_rate": 6.194064426648507e-05, "loss": 3.2328, "step": 240300 }, { "epoch": 2.1225207085960545, "grad_norm": 4.903199672698975, "learning_rate": 6.192714970367211e-05, "loss": 3.0385, "step": 240350 }, { "epoch": 2.1229622564863386, "grad_norm": 1.618307113647461, "learning_rate": 6.191365421958797e-05, "loss": 3.3363, "step": 240400 }, { "epoch": 2.123403804376623, "grad_norm": 3.6995747089385986, "learning_rate": 
6.190015781527508e-05, "loss": 2.7186, "step": 240450 }, { "epoch": 2.123845352266907, "grad_norm": 4.0562944412231445, "learning_rate": 6.18866604917759e-05, "loss": 3.1056, "step": 240500 }, { "epoch": 2.124286900157191, "grad_norm": 2.19975209236145, "learning_rate": 6.1873162250133e-05, "loss": 3.6208, "step": 240550 }, { "epoch": 2.1247284480474753, "grad_norm": 2.4086647033691406, "learning_rate": 6.1859663091389e-05, "loss": 3.0413, "step": 240600 }, { "epoch": 2.1251699959377595, "grad_norm": 1.795384407043457, "learning_rate": 6.184616301658658e-05, "loss": 2.8259, "step": 240650 }, { "epoch": 2.1256115438280436, "grad_norm": 2.943624973297119, "learning_rate": 6.183266202676854e-05, "loss": 2.8549, "step": 240700 }, { "epoch": 2.126053091718328, "grad_norm": 5.150783538818359, "learning_rate": 6.181916012297767e-05, "loss": 2.8026, "step": 240750 }, { "epoch": 2.126494639608612, "grad_norm": 2.3909823894500732, "learning_rate": 6.180565730625692e-05, "loss": 3.0313, "step": 240800 }, { "epoch": 2.126936187498896, "grad_norm": 3.5351691246032715, "learning_rate": 6.179215357764925e-05, "loss": 2.6941, "step": 240850 }, { "epoch": 2.1273777353891803, "grad_norm": 4.411786079406738, "learning_rate": 6.177864893819771e-05, "loss": 3.0214, "step": 240900 }, { "epoch": 2.1278192832794645, "grad_norm": 3.681881904602051, "learning_rate": 6.176514338894543e-05, "loss": 3.1937, "step": 240950 }, { "epoch": 2.1282608311697486, "grad_norm": 3.171037435531616, "learning_rate": 6.175163693093556e-05, "loss": 2.8394, "step": 241000 }, { "epoch": 2.128702379060033, "grad_norm": 3.15596866607666, "learning_rate": 6.173812956521141e-05, "loss": 3.3151, "step": 241050 }, { "epoch": 2.129143926950317, "grad_norm": 0.9684367179870605, "learning_rate": 6.17246212928163e-05, "loss": 3.0942, "step": 241100 }, { "epoch": 2.129585474840601, "grad_norm": 1.9763288497924805, "learning_rate": 6.171111211479358e-05, "loss": 2.7623, "step": 241150 }, { "epoch": 2.1300270227308853, 
"grad_norm": 4.591089248657227, "learning_rate": 6.16976020321868e-05, "loss": 3.0964, "step": 241200 }, { "epoch": 2.1304685706211695, "grad_norm": 2.27897047996521, "learning_rate": 6.168409104603943e-05, "loss": 3.069, "step": 241250 }, { "epoch": 2.1309101185114536, "grad_norm": 2.3860621452331543, "learning_rate": 6.167057915739511e-05, "loss": 3.6816, "step": 241300 }, { "epoch": 2.131351666401738, "grad_norm": 3.626079797744751, "learning_rate": 6.165706636729752e-05, "loss": 3.0158, "step": 241350 }, { "epoch": 2.131793214292022, "grad_norm": 4.617300987243652, "learning_rate": 6.164355267679037e-05, "loss": 3.0871, "step": 241400 }, { "epoch": 2.132234762182306, "grad_norm": 4.720652103424072, "learning_rate": 6.163003808691754e-05, "loss": 3.0995, "step": 241450 }, { "epoch": 2.1326763100725903, "grad_norm": 4.754398345947266, "learning_rate": 6.16165225987229e-05, "loss": 3.0797, "step": 241500 }, { "epoch": 2.1331178579628745, "grad_norm": 1.57192862033844, "learning_rate": 6.160300621325037e-05, "loss": 3.4888, "step": 241550 }, { "epoch": 2.133559405853159, "grad_norm": 3.342073440551758, "learning_rate": 6.158948893154401e-05, "loss": 3.5425, "step": 241600 }, { "epoch": 2.134000953743443, "grad_norm": 1.9215145111083984, "learning_rate": 6.15759707546479e-05, "loss": 2.7573, "step": 241650 }, { "epoch": 2.1344425016337274, "grad_norm": 2.9148123264312744, "learning_rate": 6.156245168360621e-05, "loss": 3.0206, "step": 241700 }, { "epoch": 2.1348840495240116, "grad_norm": 2.3337414264678955, "learning_rate": 6.154893171946319e-05, "loss": 3.3977, "step": 241750 }, { "epoch": 2.1353255974142957, "grad_norm": 3.570500612258911, "learning_rate": 6.15354108632631e-05, "loss": 3.1962, "step": 241800 }, { "epoch": 2.13576714530458, "grad_norm": 0.8573853969573975, "learning_rate": 6.152188911605037e-05, "loss": 2.7685, "step": 241850 }, { "epoch": 2.136208693194864, "grad_norm": 6.819091796875, "learning_rate": 6.150836647886937e-05, "loss": 3.1988, 
"step": 241900 }, { "epoch": 2.1366502410851482, "grad_norm": 3.854794979095459, "learning_rate": 6.149484295276467e-05, "loss": 3.0705, "step": 241950 }, { "epoch": 2.1370917889754324, "grad_norm": 4.04559850692749, "learning_rate": 6.148131853878083e-05, "loss": 3.1822, "step": 242000 }, { "epoch": 2.1375333368657166, "grad_norm": 3.1430141925811768, "learning_rate": 6.146779323796248e-05, "loss": 2.98, "step": 242050 }, { "epoch": 2.1379748847560007, "grad_norm": 2.6168956756591797, "learning_rate": 6.145426705135436e-05, "loss": 2.991, "step": 242100 }, { "epoch": 2.138416432646285, "grad_norm": 2.851386308670044, "learning_rate": 6.144073998000123e-05, "loss": 2.8305, "step": 242150 }, { "epoch": 2.138857980536569, "grad_norm": 2.7866199016571045, "learning_rate": 6.142721202494795e-05, "loss": 3.1857, "step": 242200 }, { "epoch": 2.1392995284268532, "grad_norm": 1.859724760055542, "learning_rate": 6.141368318723946e-05, "loss": 2.7095, "step": 242250 }, { "epoch": 2.1397410763171374, "grad_norm": 2.3074333667755127, "learning_rate": 6.14001534679207e-05, "loss": 3.1211, "step": 242300 }, { "epoch": 2.1401826242074216, "grad_norm": 3.469409942626953, "learning_rate": 6.138662286803677e-05, "loss": 2.8818, "step": 242350 }, { "epoch": 2.1406241720977057, "grad_norm": 4.329566478729248, "learning_rate": 6.137309138863278e-05, "loss": 3.3215, "step": 242400 }, { "epoch": 2.14106571998799, "grad_norm": 4.724846363067627, "learning_rate": 6.135955903075392e-05, "loss": 3.3933, "step": 242450 }, { "epoch": 2.141507267878274, "grad_norm": 1.137832522392273, "learning_rate": 6.134602579544546e-05, "loss": 2.7406, "step": 242500 }, { "epoch": 2.1419488157685582, "grad_norm": 2.7646424770355225, "learning_rate": 6.133249168375269e-05, "loss": 3.569, "step": 242550 }, { "epoch": 2.1423903636588424, "grad_norm": 6.01995325088501, "learning_rate": 6.131895669672106e-05, "loss": 3.3849, "step": 242600 }, { "epoch": 2.1428319115491266, "grad_norm": 1.6857974529266357, 
"learning_rate": 6.130542083539601e-05, "loss": 3.0329, "step": 242650 }, { "epoch": 2.1432734594394107, "grad_norm": 5.805901050567627, "learning_rate": 6.129188410082305e-05, "loss": 2.8821, "step": 242700 }, { "epoch": 2.143715007329695, "grad_norm": 2.3712074756622314, "learning_rate": 6.12783464940478e-05, "loss": 3.1749, "step": 242750 }, { "epoch": 2.144156555219979, "grad_norm": 1.9185987710952759, "learning_rate": 6.126480801611593e-05, "loss": 3.5455, "step": 242800 }, { "epoch": 2.144598103110263, "grad_norm": 2.8377609252929688, "learning_rate": 6.125126866807315e-05, "loss": 3.174, "step": 242850 }, { "epoch": 2.1450396510005474, "grad_norm": 1.1797131299972534, "learning_rate": 6.123772845096529e-05, "loss": 3.1248, "step": 242900 }, { "epoch": 2.1454811988908316, "grad_norm": 2.4720191955566406, "learning_rate": 6.122418736583819e-05, "loss": 3.3231, "step": 242950 }, { "epoch": 2.1459227467811157, "grad_norm": 2.443220376968384, "learning_rate": 6.121064541373779e-05, "loss": 3.1198, "step": 243000 }, { "epoch": 2.1459227467811157, "eval_asr_loss": 0.9242461907759509, "eval_loss": 2.86102032661438, "eval_runtime": 20.6095, "eval_samples_per_second": 37.264, "eval_steps_per_second": 9.316, "eval_tts_loss": 6.029543186687746, "step": 243000 }, { "epoch": 2.1463642946714, "grad_norm": 3.9056832790374756, "learning_rate": 6.11971025957101e-05, "loss": 3.4246, "step": 243050 }, { "epoch": 2.146805842561684, "grad_norm": 5.718588352203369, "learning_rate": 6.118355891280119e-05, "loss": 3.04, "step": 243100 }, { "epoch": 2.147247390451968, "grad_norm": 1.7407891750335693, "learning_rate": 6.117001436605718e-05, "loss": 3.231, "step": 243150 }, { "epoch": 2.1476889383422524, "grad_norm": 2.6677162647247314, "learning_rate": 6.11567398731635e-05, "loss": 3.4042, "step": 243200 }, { "epoch": 2.148130486232537, "grad_norm": 2.967630386352539, "learning_rate": 6.114319361911259e-05, "loss": 3.191, "step": 243250 }, { "epoch": 2.148572034122821, "grad_norm": 
4.103931903839111, "learning_rate": 6.112964650434443e-05, "loss": 3.0904, "step": 243300 }, { "epoch": 2.1490135820131053, "grad_norm": 4.89622163772583, "learning_rate": 6.111609852990548e-05, "loss": 2.9498, "step": 243350 }, { "epoch": 2.1494551299033895, "grad_norm": 2.395061492919922, "learning_rate": 6.110254969684219e-05, "loss": 3.1946, "step": 243400 }, { "epoch": 2.1498966777936737, "grad_norm": 3.4221737384796143, "learning_rate": 6.108900000620106e-05, "loss": 3.2357, "step": 243450 }, { "epoch": 2.150338225683958, "grad_norm": 4.201228141784668, "learning_rate": 6.107544945902872e-05, "loss": 2.9938, "step": 243500 }, { "epoch": 2.150779773574242, "grad_norm": 4.521472930908203, "learning_rate": 6.106189805637184e-05, "loss": 3.3467, "step": 243550 }, { "epoch": 2.151221321464526, "grad_norm": 4.29344367980957, "learning_rate": 6.104834579927711e-05, "loss": 3.0526, "step": 243600 }, { "epoch": 2.1516628693548103, "grad_norm": 2.7563273906707764, "learning_rate": 6.103506375935755e-05, "loss": 3.5406, "step": 243650 }, { "epoch": 2.1521044172450945, "grad_norm": 5.406722068786621, "learning_rate": 6.102150981356427e-05, "loss": 2.2932, "step": 243700 }, { "epoch": 2.1525459651353787, "grad_norm": 2.6092753410339355, "learning_rate": 6.100795501645281e-05, "loss": 3.2518, "step": 243750 }, { "epoch": 2.152987513025663, "grad_norm": 3.871994972229004, "learning_rate": 6.0994399369070176e-05, "loss": 2.6694, "step": 243800 }, { "epoch": 2.153429060915947, "grad_norm": 5.2820892333984375, "learning_rate": 6.098084287246343e-05, "loss": 3.4533, "step": 243850 }, { "epoch": 2.153870608806231, "grad_norm": 2.723698139190674, "learning_rate": 6.096728552767967e-05, "loss": 3.1388, "step": 243900 }, { "epoch": 2.1543121566965153, "grad_norm": 3.1074745655059814, "learning_rate": 6.095372733576611e-05, "loss": 3.4028, "step": 243950 }, { "epoch": 2.1547537045867995, "grad_norm": 3.3812079429626465, "learning_rate": 6.094016829776998e-05, "loss": 2.7625, "step": 
244000 }, { "epoch": 2.1551952524770837, "grad_norm": 3.0963339805603027, "learning_rate": 6.092660841473865e-05, "loss": 2.8601, "step": 244050 }, { "epoch": 2.155636800367368, "grad_norm": 4.549088478088379, "learning_rate": 6.091304768771947e-05, "loss": 3.0102, "step": 244100 }, { "epoch": 2.156078348257652, "grad_norm": 2.3300344944000244, "learning_rate": 6.089948611775988e-05, "loss": 3.0406, "step": 244150 }, { "epoch": 2.156519896147936, "grad_norm": 2.152743339538574, "learning_rate": 6.088592370590742e-05, "loss": 2.8996, "step": 244200 }, { "epoch": 2.1569614440382203, "grad_norm": 1.4900521039962769, "learning_rate": 6.087236045320966e-05, "loss": 3.0599, "step": 244250 }, { "epoch": 2.1574029919285045, "grad_norm": 2.646312713623047, "learning_rate": 6.0858796360714256e-05, "loss": 2.868, "step": 244300 }, { "epoch": 2.1578445398187887, "grad_norm": 3.2758681774139404, "learning_rate": 6.0845231429468916e-05, "loss": 3.0807, "step": 244350 }, { "epoch": 2.158286087709073, "grad_norm": 2.091062545776367, "learning_rate": 6.0831665660521395e-05, "loss": 3.0439, "step": 244400 }, { "epoch": 2.158727635599357, "grad_norm": 1.417259693145752, "learning_rate": 6.0818099054919574e-05, "loss": 3.0763, "step": 244450 }, { "epoch": 2.159169183489641, "grad_norm": 0.9007434844970703, "learning_rate": 6.080453161371133e-05, "loss": 3.0514, "step": 244500 }, { "epoch": 2.1596107313799253, "grad_norm": 2.9796414375305176, "learning_rate": 6.079096333794463e-05, "loss": 2.9235, "step": 244550 }, { "epoch": 2.1600522792702095, "grad_norm": 2.401273250579834, "learning_rate": 6.077739422866752e-05, "loss": 2.8255, "step": 244600 }, { "epoch": 2.1604938271604937, "grad_norm": 2.502058982849121, "learning_rate": 6.0763824286928096e-05, "loss": 3.7429, "step": 244650 }, { "epoch": 2.160935375050778, "grad_norm": 4.2916178703308105, "learning_rate": 6.075025351377453e-05, "loss": 2.7973, "step": 244700 }, { "epoch": 2.161376922941062, "grad_norm": 2.6151785850524902, 
"learning_rate": 6.0736681910255024e-05, "loss": 3.1876, "step": 244750 }, { "epoch": 2.1618184708313466, "grad_norm": 5.386536598205566, "learning_rate": 6.0723109477417896e-05, "loss": 3.115, "step": 244800 }, { "epoch": 2.1622600187216303, "grad_norm": 3.8035407066345215, "learning_rate": 6.070953621631148e-05, "loss": 2.852, "step": 244850 }, { "epoch": 2.162701566611915, "grad_norm": 0.988106906414032, "learning_rate": 6.0695962127984204e-05, "loss": 3.1852, "step": 244900 }, { "epoch": 2.163143114502199, "grad_norm": 2.1697728633880615, "learning_rate": 6.068238721348456e-05, "loss": 3.2436, "step": 244950 }, { "epoch": 2.1635846623924833, "grad_norm": 1.6820136308670044, "learning_rate": 6.06688114738611e-05, "loss": 2.865, "step": 245000 }, { "epoch": 2.1640262102827674, "grad_norm": 4.40377950668335, "learning_rate": 6.065523491016241e-05, "loss": 2.9429, "step": 245050 }, { "epoch": 2.1644677581730516, "grad_norm": 2.3662831783294678, "learning_rate": 6.064165752343718e-05, "loss": 3.0624, "step": 245100 }, { "epoch": 2.1649093060633358, "grad_norm": 3.140277147293091, "learning_rate": 6.062807931473413e-05, "loss": 2.7547, "step": 245150 }, { "epoch": 2.16535085395362, "grad_norm": 5.113159656524658, "learning_rate": 6.0614771873733054e-05, "loss": 3.41, "step": 245200 }, { "epoch": 2.165792401843904, "grad_norm": 1.3435204029083252, "learning_rate": 6.06011920406082e-05, "loss": 3.3534, "step": 245250 }, { "epoch": 2.1662339497341883, "grad_norm": 1.744178056716919, "learning_rate": 6.058761138863115e-05, "loss": 2.7636, "step": 245300 }, { "epoch": 2.1666754976244724, "grad_norm": 6.074828624725342, "learning_rate": 6.0574029918850905e-05, "loss": 3.0243, "step": 245350 }, { "epoch": 2.1671170455147566, "grad_norm": 3.06844425201416, "learning_rate": 6.056044763231652e-05, "loss": 3.4827, "step": 245400 }, { "epoch": 2.1675585934050408, "grad_norm": 4.505370616912842, "learning_rate": 6.0546864530077084e-05, "loss": 2.7537, "step": 245450 }, { "epoch": 
2.168000141295325, "grad_norm": 3.3782474994659424, "learning_rate": 6.053328061318182e-05, "loss": 2.8607, "step": 245500 }, { "epoch": 2.168441689185609, "grad_norm": 2.681347608566284, "learning_rate": 6.051969588267994e-05, "loss": 2.8934, "step": 245550 }, { "epoch": 2.1688832370758933, "grad_norm": 1.9563655853271484, "learning_rate": 6.050611033962076e-05, "loss": 3.3919, "step": 245600 }, { "epoch": 2.1693247849661774, "grad_norm": 1.3241103887557983, "learning_rate": 6.049252398505364e-05, "loss": 3.2921, "step": 245650 }, { "epoch": 2.1697663328564616, "grad_norm": 3.78419828414917, "learning_rate": 6.047893682002802e-05, "loss": 3.1427, "step": 245700 }, { "epoch": 2.1702078807467458, "grad_norm": 3.2498936653137207, "learning_rate": 6.0465348845593394e-05, "loss": 3.0815, "step": 245750 }, { "epoch": 2.17064942863703, "grad_norm": 2.077392578125, "learning_rate": 6.04517600627993e-05, "loss": 2.8622, "step": 245800 }, { "epoch": 2.171090976527314, "grad_norm": 2.2121798992156982, "learning_rate": 6.0438170472695374e-05, "loss": 3.1503, "step": 245850 }, { "epoch": 2.1715325244175983, "grad_norm": 1.9977270364761353, "learning_rate": 6.0424580076331305e-05, "loss": 2.6607, "step": 245900 }, { "epoch": 2.1719740723078824, "grad_norm": 4.047658443450928, "learning_rate": 6.041098887475681e-05, "loss": 3.0044, "step": 245950 }, { "epoch": 2.1724156201981666, "grad_norm": 1.35835862159729, "learning_rate": 6.0397396869021714e-05, "loss": 3.186, "step": 246000 }, { "epoch": 2.1724156201981666, "eval_asr_loss": 0.9392569830705412, "eval_loss": 2.8575477600097656, "eval_runtime": 20.4511, "eval_samples_per_second": 37.553, "eval_steps_per_second": 9.388, "eval_tts_loss": 6.0642424824213865, "step": 246000 }, { "epoch": 2.1728571680884508, "grad_norm": 2.7876996994018555, "learning_rate": 6.038380406017588e-05, "loss": 3.0091, "step": 246050 }, { "epoch": 2.173298715978735, "grad_norm": 23.924245834350586, "learning_rate": 6.0370210449269224e-05, "loss": 3.028, 
"step": 246100 }, { "epoch": 2.173740263869019, "grad_norm": 2.4650650024414062, "learning_rate": 6.0356616037351755e-05, "loss": 2.8779, "step": 246150 }, { "epoch": 2.1741818117593033, "grad_norm": 1.138063669204712, "learning_rate": 6.034302082547351e-05, "loss": 2.9028, "step": 246200 }, { "epoch": 2.1746233596495874, "grad_norm": 2.4580750465393066, "learning_rate": 6.032942481468462e-05, "loss": 3.2762, "step": 246250 }, { "epoch": 2.1750649075398716, "grad_norm": 4.300107002258301, "learning_rate": 6.0315828006035245e-05, "loss": 3.3666, "step": 246300 }, { "epoch": 2.1755064554301558, "grad_norm": 2.1569507122039795, "learning_rate": 6.0302230400575646e-05, "loss": 3.1456, "step": 246350 }, { "epoch": 2.17594800332044, "grad_norm": 4.602993488311768, "learning_rate": 6.0288631999356104e-05, "loss": 3.2562, "step": 246400 }, { "epoch": 2.1763895512107245, "grad_norm": 4.042548179626465, "learning_rate": 6.0275032803426975e-05, "loss": 2.946, "step": 246450 }, { "epoch": 2.1768310991010087, "grad_norm": 3.4596941471099854, "learning_rate": 6.0261432813838715e-05, "loss": 2.9021, "step": 246500 }, { "epoch": 2.177272646991293, "grad_norm": 6.044607639312744, "learning_rate": 6.024783203164177e-05, "loss": 2.8478, "step": 246550 }, { "epoch": 2.177714194881577, "grad_norm": 2.1571695804595947, "learning_rate": 6.023423045788669e-05, "loss": 3.2925, "step": 246600 }, { "epoch": 2.178155742771861, "grad_norm": 4.901209354400635, "learning_rate": 6.02206280936241e-05, "loss": 3.12, "step": 246650 }, { "epoch": 2.1785972906621454, "grad_norm": 2.6473047733306885, "learning_rate": 6.0207024939904655e-05, "loss": 3.1452, "step": 246700 }, { "epoch": 2.1790388385524295, "grad_norm": 2.9098074436187744, "learning_rate": 6.0193420997779096e-05, "loss": 3.2063, "step": 246750 }, { "epoch": 2.1794803864427137, "grad_norm": 7.190146446228027, "learning_rate": 6.0179816268298194e-05, "loss": 3.2197, "step": 246800 }, { "epoch": 2.179921934332998, "grad_norm": 
2.408454418182373, "learning_rate": 6.0166210752512806e-05, "loss": 2.8101, "step": 246850 }, { "epoch": 2.180363482223282, "grad_norm": 5.173770904541016, "learning_rate": 6.015260445147386e-05, "loss": 3.2971, "step": 246900 }, { "epoch": 2.180805030113566, "grad_norm": 4.6776957511901855, "learning_rate": 6.01389973662323e-05, "loss": 3.2507, "step": 246950 }, { "epoch": 2.1812465780038504, "grad_norm": 2.6588592529296875, "learning_rate": 6.012538949783916e-05, "loss": 3.041, "step": 247000 }, { "epoch": 2.1816881258941345, "grad_norm": 2.170778274536133, "learning_rate": 6.011178084734556e-05, "loss": 3.2546, "step": 247050 }, { "epoch": 2.1821296737844187, "grad_norm": 3.928318977355957, "learning_rate": 6.009817141580262e-05, "loss": 3.4263, "step": 247100 }, { "epoch": 2.182571221674703, "grad_norm": 2.583965301513672, "learning_rate": 6.008456120426158e-05, "loss": 2.922, "step": 247150 }, { "epoch": 2.183012769564987, "grad_norm": 2.960022211074829, "learning_rate": 6.007095021377369e-05, "loss": 3.0257, "step": 247200 }, { "epoch": 2.183454317455271, "grad_norm": 1.4887888431549072, "learning_rate": 6.0057338445390296e-05, "loss": 3.0247, "step": 247250 }, { "epoch": 2.1838958653455554, "grad_norm": 2.6487021446228027, "learning_rate": 6.004372590016281e-05, "loss": 3.2311, "step": 247300 }, { "epoch": 2.1843374132358395, "grad_norm": 3.543104648590088, "learning_rate": 6.0030112579142636e-05, "loss": 3.1826, "step": 247350 }, { "epoch": 2.1847789611261237, "grad_norm": 4.436171531677246, "learning_rate": 6.0016498483381345e-05, "loss": 3.0521, "step": 247400 }, { "epoch": 2.185220509016408, "grad_norm": 1.3873144388198853, "learning_rate": 6.000288361393047e-05, "loss": 3.2691, "step": 247450 }, { "epoch": 2.185662056906692, "grad_norm": 2.788475513458252, "learning_rate": 5.998926797184167e-05, "loss": 3.2972, "step": 247500 }, { "epoch": 2.186103604796976, "grad_norm": 5.260395050048828, "learning_rate": 5.997565155816661e-05, "loss": 3.1924, "step": 
247550 }, { "epoch": 2.1865451526872604, "grad_norm": 3.8101744651794434, "learning_rate": 5.996203437395706e-05, "loss": 3.6127, "step": 247600 }, { "epoch": 2.1869867005775445, "grad_norm": 6.26643705368042, "learning_rate": 5.994841642026485e-05, "loss": 2.5554, "step": 247650 }, { "epoch": 2.1874282484678287, "grad_norm": 4.825605869293213, "learning_rate": 5.993479769814182e-05, "loss": 3.6132, "step": 247700 }, { "epoch": 2.187869796358113, "grad_norm": 4.736049652099609, "learning_rate": 5.992117820863989e-05, "loss": 2.9763, "step": 247750 }, { "epoch": 2.188311344248397, "grad_norm": 1.331676959991455, "learning_rate": 5.99075579528111e-05, "loss": 3.1284, "step": 247800 }, { "epoch": 2.188752892138681, "grad_norm": 2.1483421325683594, "learning_rate": 5.989393693170746e-05, "loss": 2.9496, "step": 247850 }, { "epoch": 2.1891944400289653, "grad_norm": 3.0510306358337402, "learning_rate": 5.988031514638111e-05, "loss": 3.2423, "step": 247900 }, { "epoch": 2.1896359879192495, "grad_norm": 2.3355166912078857, "learning_rate": 5.9866692597884174e-05, "loss": 3.1941, "step": 247950 }, { "epoch": 2.190077535809534, "grad_norm": 3.830967664718628, "learning_rate": 5.9853069287268906e-05, "loss": 2.9566, "step": 248000 }, { "epoch": 2.190519083699818, "grad_norm": 4.699981212615967, "learning_rate": 5.9839445215587596e-05, "loss": 2.9206, "step": 248050 }, { "epoch": 2.1909606315901025, "grad_norm": 6.072689533233643, "learning_rate": 5.982582038389257e-05, "loss": 3.256, "step": 248100 }, { "epoch": 2.1914021794803866, "grad_norm": 4.4500203132629395, "learning_rate": 5.9812194793236245e-05, "loss": 3.0591, "step": 248150 }, { "epoch": 2.191843727370671, "grad_norm": 3.2074832916259766, "learning_rate": 5.979856844467108e-05, "loss": 2.9361, "step": 248200 }, { "epoch": 2.192285275260955, "grad_norm": 1.263113260269165, "learning_rate": 5.978494133924959e-05, "loss": 3.0833, "step": 248250 }, { "epoch": 2.192726823151239, "grad_norm": 1.6638262271881104, 
"learning_rate": 5.977131347802435e-05, "loss": 3.2541, "step": 248300 }, { "epoch": 2.1931683710415233, "grad_norm": 2.9127674102783203, "learning_rate": 5.9757684862048004e-05, "loss": 3.2699, "step": 248350 }, { "epoch": 2.1936099189318075, "grad_norm": 1.5334758758544922, "learning_rate": 5.974405549237324e-05, "loss": 3.212, "step": 248400 }, { "epoch": 2.1940514668220916, "grad_norm": 2.567260980606079, "learning_rate": 5.973042537005283e-05, "loss": 3.1895, "step": 248450 }, { "epoch": 2.194493014712376, "grad_norm": 2.907191753387451, "learning_rate": 5.971679449613956e-05, "loss": 3.2427, "step": 248500 }, { "epoch": 2.19493456260266, "grad_norm": 1.528548240661621, "learning_rate": 5.970316287168631e-05, "loss": 3.1348, "step": 248550 }, { "epoch": 2.195376110492944, "grad_norm": 2.61979079246521, "learning_rate": 5.9689530497746023e-05, "loss": 3.1506, "step": 248600 }, { "epoch": 2.1958176583832283, "grad_norm": 1.5362796783447266, "learning_rate": 5.967589737537166e-05, "loss": 2.6354, "step": 248650 }, { "epoch": 2.1962592062735125, "grad_norm": 4.144810676574707, "learning_rate": 5.966226350561628e-05, "loss": 3.2019, "step": 248700 }, { "epoch": 2.1967007541637966, "grad_norm": 2.271742820739746, "learning_rate": 5.964862888953297e-05, "loss": 3.2604, "step": 248750 }, { "epoch": 2.197142302054081, "grad_norm": 2.973482131958008, "learning_rate": 5.96349935281749e-05, "loss": 2.8219, "step": 248800 }, { "epoch": 2.197583849944365, "grad_norm": 4.361788749694824, "learning_rate": 5.9621357422595295e-05, "loss": 3.1846, "step": 248850 }, { "epoch": 2.198025397834649, "grad_norm": 0.705970048904419, "learning_rate": 5.960772057384739e-05, "loss": 3.398, "step": 248900 }, { "epoch": 2.1984669457249333, "grad_norm": 1.652596354484558, "learning_rate": 5.959408298298456e-05, "loss": 3.2226, "step": 248950 }, { "epoch": 2.1989084936152175, "grad_norm": 1.5994527339935303, "learning_rate": 5.958044465106016e-05, "loss": 2.5712, "step": 249000 }, { "epoch": 
2.1989084936152175, "eval_asr_loss": 0.9304255538908192, "eval_loss": 2.846482992172241, "eval_runtime": 20.4139, "eval_samples_per_second": 37.621, "eval_steps_per_second": 9.405, "eval_tts_loss": 6.0102194777481515, "step": 249000 }, { "epoch": 2.1993500415055016, "grad_norm": 2.3923439979553223, "learning_rate": 5.956680557912766e-05, "loss": 3.1958, "step": 249050 }, { "epoch": 2.199791589395786, "grad_norm": 3.9689152240753174, "learning_rate": 5.955316576824056e-05, "loss": 3.1784, "step": 249100 }, { "epoch": 2.20023313728607, "grad_norm": 2.680379629135132, "learning_rate": 5.9539525219452396e-05, "loss": 3.2746, "step": 249150 }, { "epoch": 2.200674685176354, "grad_norm": 2.977792978286743, "learning_rate": 5.952588393381682e-05, "loss": 2.9965, "step": 249200 }, { "epoch": 2.2011162330666383, "grad_norm": 7.2773261070251465, "learning_rate": 5.951224191238749e-05, "loss": 3.0737, "step": 249250 }, { "epoch": 2.2015577809569224, "grad_norm": 3.3566043376922607, "learning_rate": 5.9498872018535125e-05, "loss": 3.2456, "step": 249300 }, { "epoch": 2.2019993288472066, "grad_norm": 4.931230068206787, "learning_rate": 5.948522854334293e-05, "loss": 2.9961, "step": 249350 }, { "epoch": 2.202440876737491, "grad_norm": 2.0846118927001953, "learning_rate": 5.9471584335497246e-05, "loss": 3.0981, "step": 249400 }, { "epoch": 2.202882424627775, "grad_norm": 5.715150833129883, "learning_rate": 5.9457939396051985e-05, "loss": 2.7673, "step": 249450 }, { "epoch": 2.203323972518059, "grad_norm": 1.7555757761001587, "learning_rate": 5.944429372606111e-05, "loss": 3.1935, "step": 249500 }, { "epoch": 2.2037655204083433, "grad_norm": 3.721235752105713, "learning_rate": 5.943064732657864e-05, "loss": 3.0966, "step": 249550 }, { "epoch": 2.2042070682986274, "grad_norm": 3.5476677417755127, "learning_rate": 5.9417000198658625e-05, "loss": 2.8752, "step": 249600 }, { "epoch": 2.204648616188912, "grad_norm": 4.5587286949157715, "learning_rate": 5.94033523433552e-05, "loss": 
3.0805, "step": 249650 }, { "epoch": 2.2050901640791962, "grad_norm": 4.7551703453063965, "learning_rate": 5.938970376172254e-05, "loss": 2.7924, "step": 249700 }, { "epoch": 2.2055317119694804, "grad_norm": 3.6469037532806396, "learning_rate": 5.93760544548149e-05, "loss": 3.1216, "step": 249750 }, { "epoch": 2.2059732598597646, "grad_norm": 3.2515978813171387, "learning_rate": 5.9362404423686534e-05, "loss": 3.1162, "step": 249800 }, { "epoch": 2.2064148077500487, "grad_norm": 4.0924787521362305, "learning_rate": 5.9348753669391844e-05, "loss": 3.6559, "step": 249850 }, { "epoch": 2.206856355640333, "grad_norm": 2.8159549236297607, "learning_rate": 5.9335102192985194e-05, "loss": 2.7405, "step": 249900 }, { "epoch": 2.207297903530617, "grad_norm": 1.7060962915420532, "learning_rate": 5.9321449995521074e-05, "loss": 3.0616, "step": 249950 }, { "epoch": 2.2077394514209012, "grad_norm": 3.1570303440093994, "learning_rate": 5.930779707805397e-05, "loss": 3.4593, "step": 250000 }, { "epoch": 2.2081809993111854, "grad_norm": 2.408595561981201, "learning_rate": 5.929414344163846e-05, "loss": 3.0959, "step": 250050 }, { "epoch": 2.2086225472014696, "grad_norm": 3.342355966567993, "learning_rate": 5.9280489087329205e-05, "loss": 3.0539, "step": 250100 }, { "epoch": 2.2090640950917537, "grad_norm": 5.067995548248291, "learning_rate": 5.926683401618086e-05, "loss": 3.0701, "step": 250150 }, { "epoch": 2.209505642982038, "grad_norm": 1.7544509172439575, "learning_rate": 5.925317822924815e-05, "loss": 3.0253, "step": 250200 }, { "epoch": 2.209947190872322, "grad_norm": 2.652850866317749, "learning_rate": 5.923952172758591e-05, "loss": 3.3815, "step": 250250 }, { "epoch": 2.210388738762606, "grad_norm": 4.625723838806152, "learning_rate": 5.9225864512248955e-05, "loss": 2.9297, "step": 250300 }, { "epoch": 2.2108302866528904, "grad_norm": 2.095754384994507, "learning_rate": 5.9212206584292196e-05, "loss": 2.9707, "step": 250350 }, { "epoch": 2.2112718345431746, "grad_norm": 
5.024804592132568, "learning_rate": 5.9198547944770596e-05, "loss": 3.1168, "step": 250400 }, { "epoch": 2.2117133824334587, "grad_norm": 9.1120023727417, "learning_rate": 5.918488859473916e-05, "loss": 2.9197, "step": 250450 }, { "epoch": 2.212154930323743, "grad_norm": 1.8666355609893799, "learning_rate": 5.9171228535253e-05, "loss": 3.035, "step": 250500 }, { "epoch": 2.212596478214027, "grad_norm": 1.0906444787979126, "learning_rate": 5.9157567767367175e-05, "loss": 3.2451, "step": 250550 }, { "epoch": 2.213038026104311, "grad_norm": 2.957937479019165, "learning_rate": 5.91439062921369e-05, "loss": 2.8599, "step": 250600 }, { "epoch": 2.2134795739945954, "grad_norm": 2.6834259033203125, "learning_rate": 5.913024411061739e-05, "loss": 3.3457, "step": 250650 }, { "epoch": 2.2139211218848796, "grad_norm": 2.9120137691497803, "learning_rate": 5.911658122386397e-05, "loss": 3.1717, "step": 250700 }, { "epoch": 2.2143626697751637, "grad_norm": 1.45148503780365, "learning_rate": 5.9102917632931945e-05, "loss": 3.0067, "step": 250750 }, { "epoch": 2.214804217665448, "grad_norm": 2.60270357131958, "learning_rate": 5.9089253338876715e-05, "loss": 3.1368, "step": 250800 }, { "epoch": 2.215245765555732, "grad_norm": 1.3488702774047852, "learning_rate": 5.907558834275374e-05, "loss": 2.949, "step": 250850 }, { "epoch": 2.215687313446016, "grad_norm": 3.042109727859497, "learning_rate": 5.906192264561855e-05, "loss": 3.3949, "step": 250900 }, { "epoch": 2.2161288613363004, "grad_norm": 1.2870628833770752, "learning_rate": 5.9048256248526644e-05, "loss": 2.9223, "step": 250950 }, { "epoch": 2.2165704092265845, "grad_norm": 4.794906139373779, "learning_rate": 5.9034589152533695e-05, "loss": 3.2492, "step": 251000 }, { "epoch": 2.2170119571168687, "grad_norm": 3.8245151042938232, "learning_rate": 5.902092135869533e-05, "loss": 3.2285, "step": 251050 }, { "epoch": 2.217453505007153, "grad_norm": 0.950536847114563, "learning_rate": 5.90072528680673e-05, "loss": 3.144, "step": 
251100 }, { "epoch": 2.217895052897437, "grad_norm": 1.9943549633026123, "learning_rate": 5.8993583681705354e-05, "loss": 2.9898, "step": 251150 }, { "epoch": 2.2183366007877217, "grad_norm": 2.278146982192993, "learning_rate": 5.897991380066533e-05, "loss": 3.2155, "step": 251200 }, { "epoch": 2.218778148678006, "grad_norm": 6.75840425491333, "learning_rate": 5.8966243226003104e-05, "loss": 2.9816, "step": 251250 }, { "epoch": 2.21921969656829, "grad_norm": 2.8531532287597656, "learning_rate": 5.895257195877465e-05, "loss": 3.243, "step": 251300 }, { "epoch": 2.219661244458574, "grad_norm": 3.177061080932617, "learning_rate": 5.8939173445980634e-05, "loss": 3.2117, "step": 251350 }, { "epoch": 2.2201027923488583, "grad_norm": 1.6812225580215454, "learning_rate": 5.892550081058642e-05, "loss": 3.1777, "step": 251400 }, { "epoch": 2.2205443402391425, "grad_norm": 0.9270955920219421, "learning_rate": 5.891182748577294e-05, "loss": 3.4789, "step": 251450 }, { "epoch": 2.2209858881294267, "grad_norm": 1.8096257448196411, "learning_rate": 5.889815347259635e-05, "loss": 2.7918, "step": 251500 }, { "epoch": 2.221427436019711, "grad_norm": 4.176187992095947, "learning_rate": 5.8884478772112864e-05, "loss": 3.036, "step": 251550 }, { "epoch": 2.221868983909995, "grad_norm": 0.8330768346786499, "learning_rate": 5.887080338537873e-05, "loss": 3.2742, "step": 251600 }, { "epoch": 2.222310531800279, "grad_norm": 2.405538558959961, "learning_rate": 5.885712731345026e-05, "loss": 2.8618, "step": 251650 }, { "epoch": 2.2227520796905633, "grad_norm": 5.042022705078125, "learning_rate": 5.884345055738381e-05, "loss": 2.7369, "step": 251700 }, { "epoch": 2.2231936275808475, "grad_norm": 3.783979654312134, "learning_rate": 5.8829773118235774e-05, "loss": 3.2306, "step": 251750 }, { "epoch": 2.2236351754711317, "grad_norm": 2.477170705795288, "learning_rate": 5.881609499706267e-05, "loss": 2.8828, "step": 251800 }, { "epoch": 2.224076723361416, "grad_norm": 1.5696929693222046, 
"learning_rate": 5.8802416194920985e-05, "loss": 3.2756, "step": 251850 }, { "epoch": 2.2245182712517, "grad_norm": 2.808018684387207, "learning_rate": 5.8788736712867265e-05, "loss": 3.8227, "step": 251900 }, { "epoch": 2.224959819141984, "grad_norm": 4.302034854888916, "learning_rate": 5.877505655195818e-05, "loss": 3.4848, "step": 251950 }, { "epoch": 2.2254013670322683, "grad_norm": 1.6220154762268066, "learning_rate": 5.8761375713250376e-05, "loss": 3.2193, "step": 252000 }, { "epoch": 2.2254013670322683, "eval_asr_loss": 0.9349989803252292, "eval_loss": 2.84177303314209, "eval_runtime": 20.8268, "eval_samples_per_second": 36.876, "eval_steps_per_second": 9.219, "eval_tts_loss": 6.0021888255360905, "step": 252000 }, { "epoch": 2.2258429149225525, "grad_norm": 2.4672141075134277, "learning_rate": 5.874769419780061e-05, "loss": 3.1268, "step": 252050 }, { "epoch": 2.2262844628128367, "grad_norm": 0.6886611580848694, "learning_rate": 5.873401200666563e-05, "loss": 2.5239, "step": 252100 }, { "epoch": 2.226726010703121, "grad_norm": 2.589230537414551, "learning_rate": 5.8720329140902284e-05, "loss": 3.2045, "step": 252150 }, { "epoch": 2.227167558593405, "grad_norm": 1.5318831205368042, "learning_rate": 5.870664560156747e-05, "loss": 3.0298, "step": 252200 }, { "epoch": 2.227609106483689, "grad_norm": 2.816803216934204, "learning_rate": 5.869296138971809e-05, "loss": 3.2201, "step": 252250 }, { "epoch": 2.2280506543739733, "grad_norm": 2.9629454612731934, "learning_rate": 5.8679276506411184e-05, "loss": 3.2301, "step": 252300 }, { "epoch": 2.2284922022642575, "grad_norm": 3.7039413452148438, "learning_rate": 5.866559095270375e-05, "loss": 3.3573, "step": 252350 }, { "epoch": 2.2289337501545416, "grad_norm": 4.958855152130127, "learning_rate": 5.865190472965291e-05, "loss": 2.9811, "step": 252400 }, { "epoch": 2.229375298044826, "grad_norm": 4.706749439239502, "learning_rate": 5.8638491582684894e-05, "loss": 2.9408, "step": 252450 }, { "epoch": 2.22981684593511, 
"grad_norm": 2.872765064239502, "learning_rate": 5.8624804037452916e-05, "loss": 3.0163, "step": 252500 }, { "epoch": 2.230258393825394, "grad_norm": 2.8682596683502197, "learning_rate": 5.861111582602796e-05, "loss": 3.4345, "step": 252550 }, { "epoch": 2.2306999417156783, "grad_norm": 2.9680938720703125, "learning_rate": 5.8597426949467325e-05, "loss": 3.4903, "step": 252600 }, { "epoch": 2.2311414896059625, "grad_norm": 1.2532522678375244, "learning_rate": 5.858373740882837e-05, "loss": 2.7181, "step": 252650 }, { "epoch": 2.2315830374962466, "grad_norm": 1.0817298889160156, "learning_rate": 5.8570047205168475e-05, "loss": 2.8678, "step": 252700 }, { "epoch": 2.2320245853865313, "grad_norm": 4.356616497039795, "learning_rate": 5.8556356339545124e-05, "loss": 2.9539, "step": 252750 }, { "epoch": 2.232466133276815, "grad_norm": 1.9056625366210938, "learning_rate": 5.854266481301578e-05, "loss": 3.1982, "step": 252800 }, { "epoch": 2.2329076811670996, "grad_norm": 3.5170702934265137, "learning_rate": 5.8528972626638035e-05, "loss": 2.9899, "step": 252850 }, { "epoch": 2.2333492290573838, "grad_norm": 2.9625301361083984, "learning_rate": 5.851527978146948e-05, "loss": 3.3675, "step": 252900 }, { "epoch": 2.233790776947668, "grad_norm": 2.527756929397583, "learning_rate": 5.8501586278567755e-05, "loss": 2.8116, "step": 252950 }, { "epoch": 2.234232324837952, "grad_norm": 3.2555031776428223, "learning_rate": 5.848789211899058e-05, "loss": 2.7627, "step": 253000 }, { "epoch": 2.2346738727282363, "grad_norm": 1.6485633850097656, "learning_rate": 5.847419730379572e-05, "loss": 3.2575, "step": 253050 }, { "epoch": 2.2351154206185204, "grad_norm": 2.901740074157715, "learning_rate": 5.846050183404099e-05, "loss": 3.4547, "step": 253100 }, { "epoch": 2.2355569685088046, "grad_norm": 1.9311729669570923, "learning_rate": 5.8446805710784226e-05, "loss": 3.0989, "step": 253150 }, { "epoch": 2.2359985163990888, "grad_norm": 5.239856243133545, "learning_rate": 
5.843310893508333e-05, "loss": 3.2647, "step": 253200 }, { "epoch": 2.236440064289373, "grad_norm": 3.5403478145599365, "learning_rate": 5.84194115079963e-05, "loss": 3.1479, "step": 253250 }, { "epoch": 2.236881612179657, "grad_norm": 2.1544852256774902, "learning_rate": 5.840571343058113e-05, "loss": 3.427, "step": 253300 }, { "epoch": 2.2373231600699413, "grad_norm": 2.9125540256500244, "learning_rate": 5.839201470389587e-05, "loss": 3.4527, "step": 253350 }, { "epoch": 2.2377647079602254, "grad_norm": 3.3283560276031494, "learning_rate": 5.837831532899863e-05, "loss": 3.0141, "step": 253400 }, { "epoch": 2.2382062558505096, "grad_norm": 4.432610511779785, "learning_rate": 5.836461530694759e-05, "loss": 2.6997, "step": 253450 }, { "epoch": 2.2386478037407938, "grad_norm": 3.704026699066162, "learning_rate": 5.835091463880094e-05, "loss": 3.5248, "step": 253500 }, { "epoch": 2.239089351631078, "grad_norm": 6.4238810539245605, "learning_rate": 5.833721332561694e-05, "loss": 3.3761, "step": 253550 }, { "epoch": 2.239530899521362, "grad_norm": 3.5807857513427734, "learning_rate": 5.8323511368453906e-05, "loss": 3.2106, "step": 253600 }, { "epoch": 2.2399724474116463, "grad_norm": 4.876890659332275, "learning_rate": 5.8309808768370214e-05, "loss": 3.2801, "step": 253650 }, { "epoch": 2.2404139953019304, "grad_norm": 4.392118453979492, "learning_rate": 5.829610552642426e-05, "loss": 3.2126, "step": 253700 }, { "epoch": 2.2408555431922146, "grad_norm": 3.744412422180176, "learning_rate": 5.82824016436745e-05, "loss": 3.1052, "step": 253750 }, { "epoch": 2.2412970910824987, "grad_norm": 3.0900723934173584, "learning_rate": 5.826869712117944e-05, "loss": 3.1165, "step": 253800 }, { "epoch": 2.241738638972783, "grad_norm": 2.942573308944702, "learning_rate": 5.825499195999765e-05, "loss": 3.0981, "step": 253850 }, { "epoch": 2.242180186863067, "grad_norm": 2.052508592605591, "learning_rate": 5.8241286161187737e-05, "loss": 3.4993, "step": 253900 }, { "epoch": 
2.2426217347533512, "grad_norm": 2.6822502613067627, "learning_rate": 5.822757972580832e-05, "loss": 3.0703, "step": 253950 }, { "epoch": 2.2430632826436354, "grad_norm": 2.0350875854492188, "learning_rate": 5.821387265491817e-05, "loss": 3.3772, "step": 254000 }, { "epoch": 2.2435048305339196, "grad_norm": 4.792269229888916, "learning_rate": 5.8200164949575996e-05, "loss": 2.8125, "step": 254050 }, { "epoch": 2.2439463784242037, "grad_norm": 5.7412285804748535, "learning_rate": 5.8186456610840624e-05, "loss": 2.6822, "step": 254100 }, { "epoch": 2.244387926314488, "grad_norm": 7.473790168762207, "learning_rate": 5.81727476397709e-05, "loss": 3.1313, "step": 254150 }, { "epoch": 2.244829474204772, "grad_norm": 3.2918715476989746, "learning_rate": 5.8159038037425704e-05, "loss": 3.1051, "step": 254200 }, { "epoch": 2.2452710220950562, "grad_norm": 1.7990907430648804, "learning_rate": 5.814532780486402e-05, "loss": 3.1019, "step": 254250 }, { "epoch": 2.2457125699853404, "grad_norm": 3.1198318004608154, "learning_rate": 5.813161694314484e-05, "loss": 3.4328, "step": 254300 }, { "epoch": 2.2461541178756246, "grad_norm": 2.5575218200683594, "learning_rate": 5.811790545332719e-05, "loss": 3.2553, "step": 254350 }, { "epoch": 2.246595665765909, "grad_norm": 1.4484281539916992, "learning_rate": 5.810419333647019e-05, "loss": 3.2154, "step": 254400 }, { "epoch": 2.2470372136561934, "grad_norm": 4.266369342803955, "learning_rate": 5.809048059363298e-05, "loss": 3.2351, "step": 254450 }, { "epoch": 2.2474787615464775, "grad_norm": 2.723226308822632, "learning_rate": 5.807676722587474e-05, "loss": 3.0583, "step": 254500 }, { "epoch": 2.2479203094367617, "grad_norm": 2.533464193344116, "learning_rate": 5.806305323425473e-05, "loss": 2.9562, "step": 254550 }, { "epoch": 2.248361857327046, "grad_norm": 3.364504814147949, "learning_rate": 5.804933861983222e-05, "loss": 3.337, "step": 254600 }, { "epoch": 2.24880340521733, "grad_norm": 2.2846782207489014, "learning_rate": 
5.803562338366657e-05, "loss": 2.9944, "step": 254650 }, { "epoch": 2.249244953107614, "grad_norm": 1.4509943723678589, "learning_rate": 5.8021907526817156e-05, "loss": 3.4156, "step": 254700 }, { "epoch": 2.2496865009978984, "grad_norm": 1.3979560136795044, "learning_rate": 5.800819105034338e-05, "loss": 3.1632, "step": 254750 }, { "epoch": 2.2501280488881825, "grad_norm": 6.0780110359191895, "learning_rate": 5.7994473955304786e-05, "loss": 2.7354, "step": 254800 }, { "epoch": 2.2505695967784667, "grad_norm": 2.125615119934082, "learning_rate": 5.798075624276085e-05, "loss": 2.7818, "step": 254850 }, { "epoch": 2.251011144668751, "grad_norm": 6.371503829956055, "learning_rate": 5.7967037913771185e-05, "loss": 3.0069, "step": 254900 }, { "epoch": 2.251452692559035, "grad_norm": 6.355823993682861, "learning_rate": 5.795331896939539e-05, "loss": 3.3613, "step": 254950 }, { "epoch": 2.251894240449319, "grad_norm": 3.9638240337371826, "learning_rate": 5.7939599410693134e-05, "loss": 3.2695, "step": 255000 }, { "epoch": 2.251894240449319, "eval_asr_loss": 0.9167550158655973, "eval_loss": 2.836369752883911, "eval_runtime": 20.729, "eval_samples_per_second": 37.05, "eval_steps_per_second": 9.262, "eval_tts_loss": 6.040006003285445, "step": 255000 } ], "logging_steps": 50, "max_steps": 566190, "num_input_tokens_seen": 0, "num_train_epochs": 5, "save_steps": 3000, "stateful_callbacks": { "TrainerControl": { "args": { "should_epoch_stop": false, "should_evaluate": false, "should_log": false, "should_save": true, "should_training_stop": false }, "attributes": {} } }, "total_flos": 2.4268119972710973e+18, "train_batch_size": 4, "trial_name": null, "trial_params": null }