{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.9999880000239999,
  "global_step": 33333,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01,
      "learning_rate": 4.9249992499925e-05,
      "loss": 1.253,
      "step": 500
    },
    {
      "epoch": 0.03,
      "learning_rate": 4.849998499985e-05,
      "loss": 1.2489,
      "step": 1000
    },
    {
      "epoch": 0.04,
      "learning_rate": 4.7749977499775e-05,
      "loss": 1.2349,
      "step": 1500
    },
    {
      "epoch": 0.06,
      "learning_rate": 4.69999699997e-05,
      "loss": 1.2296,
      "step": 2000
    },
    {
      "epoch": 0.07,
      "learning_rate": 4.6249962499625e-05,
      "loss": 1.2276,
      "step": 2500
    },
    {
      "epoch": 0.09,
      "learning_rate": 4.549995499955e-05,
      "loss": 1.2195,
      "step": 3000
    },
    {
      "epoch": 0.1,
      "learning_rate": 4.4749947499475e-05,
      "loss": 1.2111,
      "step": 3500
    },
    {
      "epoch": 0.12,
      "learning_rate": 4.39999399994e-05,
      "loss": 1.2111,
      "step": 4000
    },
    {
      "epoch": 0.13,
      "learning_rate": 4.3249932499324996e-05,
      "loss": 1.21,
      "step": 4500
    },
    {
      "epoch": 0.15,
      "learning_rate": 4.2499924999249994e-05,
      "loss": 1.2027,
      "step": 5000
    },
    {
      "epoch": 0.16,
      "learning_rate": 4.174991749917499e-05,
      "loss": 1.2024,
      "step": 5500
    },
    {
      "epoch": 0.18,
      "learning_rate": 4.09999099991e-05,
      "loss": 1.2017,
      "step": 6000
    },
    {
      "epoch": 0.19,
      "learning_rate": 4.0249902499024996e-05,
      "loss": 1.2009,
      "step": 6500
    },
    {
      "epoch": 0.21,
      "learning_rate": 3.9499894998949995e-05,
      "loss": 1.192,
      "step": 7000
    },
    {
      "epoch": 0.22,
      "learning_rate": 3.8749887498874993e-05,
      "loss": 1.1969,
      "step": 7500
    },
    {
      "epoch": 0.24,
      "learning_rate": 3.799987999879999e-05,
      "loss": 1.1949,
      "step": 8000
    },
    {
      "epoch": 0.25,
      "learning_rate": 3.724987249872499e-05,
      "loss": 1.1974,
      "step": 8500
    },
    {
      "epoch": 0.27,
      "learning_rate": 3.649986499864999e-05,
      "loss": 1.187,
      "step": 9000
    },
    {
      "epoch": 0.28,
      "learning_rate": 3.574985749857499e-05,
      "loss": 1.1831,
      "step": 9500
    },
    {
      "epoch": 0.3,
      "learning_rate": 3.4999849998499986e-05,
      "loss": 1.183,
      "step": 10000
    },
    {
      "epoch": 0.31,
      "learning_rate": 3.4249842498424984e-05,
      "loss": 1.1819,
      "step": 10500
    },
    {
      "epoch": 0.33,
      "learning_rate": 3.349983499834998e-05,
      "loss": 1.1824,
      "step": 11000
    },
    {
      "epoch": 0.34,
      "learning_rate": 3.274982749827498e-05,
      "loss": 1.1832,
      "step": 11500
    },
    {
      "epoch": 0.36,
      "learning_rate": 3.199981999819998e-05,
      "loss": 1.1781,
      "step": 12000
    },
    {
      "epoch": 0.37,
      "learning_rate": 3.124981249812498e-05,
      "loss": 1.1775,
      "step": 12500
    },
    {
      "epoch": 0.39,
      "learning_rate": 3.049980499804998e-05,
      "loss": 1.1699,
      "step": 13000
    },
    {
      "epoch": 0.4,
      "learning_rate": 2.974979749797498e-05,
      "loss": 1.1687,
      "step": 13500
    },
    {
      "epoch": 0.42,
      "learning_rate": 2.8999789997899977e-05,
      "loss": 1.1653,
      "step": 14000
    },
    {
      "epoch": 0.43,
      "learning_rate": 2.824978249782498e-05,
      "loss": 1.1712,
      "step": 14500
    },
    {
      "epoch": 0.45,
      "learning_rate": 2.7499774997749978e-05,
      "loss": 1.1671,
      "step": 15000
    },
    {
      "epoch": 0.46,
      "learning_rate": 2.6749767497674976e-05,
      "loss": 1.1634,
      "step": 15500
    },
    {
      "epoch": 0.48,
      "learning_rate": 2.5999759997599975e-05,
      "loss": 1.1667,
      "step": 16000
    },
    {
      "epoch": 0.49,
      "learning_rate": 2.5249752497524977e-05,
      "loss": 1.159,
      "step": 16500
    },
    {
      "epoch": 0.51,
      "learning_rate": 2.4499744997449975e-05,
      "loss": 1.1569,
      "step": 17000
    },
    {
      "epoch": 0.52,
      "learning_rate": 2.3749737497374974e-05,
      "loss": 1.1556,
      "step": 17500
    },
    {
      "epoch": 0.54,
      "learning_rate": 2.2999729997299972e-05,
      "loss": 1.1603,
      "step": 18000
    },
    {
      "epoch": 0.55,
      "learning_rate": 2.2249722497224974e-05,
      "loss": 1.1533,
      "step": 18500
    },
    {
      "epoch": 0.57,
      "learning_rate": 2.1499714997149973e-05,
      "loss": 1.1536,
      "step": 19000
    },
    {
      "epoch": 0.58,
      "learning_rate": 2.074970749707497e-05,
      "loss": 1.1563,
      "step": 19500
    },
    {
      "epoch": 0.6,
      "learning_rate": 1.999969999699997e-05,
      "loss": 1.1494,
      "step": 20000
    },
    {
      "epoch": 0.61,
      "learning_rate": 1.924969249692497e-05,
      "loss": 1.1511,
      "step": 20500
    },
    {
      "epoch": 0.63,
      "learning_rate": 1.849968499684997e-05,
      "loss": 1.1432,
      "step": 21000
    },
    {
      "epoch": 0.64,
      "learning_rate": 1.774967749677497e-05,
      "loss": 1.1525,
      "step": 21500
    },
    {
      "epoch": 0.66,
      "learning_rate": 1.6999669996699967e-05,
      "loss": 1.1437,
      "step": 22000
    },
    {
      "epoch": 0.67,
      "learning_rate": 1.624966249662497e-05,
      "loss": 1.1452,
      "step": 22500
    },
    {
      "epoch": 0.69,
      "learning_rate": 1.5499654996549967e-05,
      "loss": 1.1411,
      "step": 23000
    },
    {
      "epoch": 0.7,
      "learning_rate": 1.4749647496474966e-05,
      "loss": 1.1381,
      "step": 23500
    },
    {
      "epoch": 0.72,
      "learning_rate": 1.3999639996399966e-05,
      "loss": 1.1412,
      "step": 24000
    },
    {
      "epoch": 0.73,
      "learning_rate": 1.3249632496324964e-05,
      "loss": 1.1405,
      "step": 24500
    },
    {
      "epoch": 0.75,
      "learning_rate": 1.2499624996249963e-05,
      "loss": 1.1418,
      "step": 25000
    },
    {
      "epoch": 0.76,
      "learning_rate": 1.1749617496174963e-05,
      "loss": 1.1351,
      "step": 25500
    },
    {
      "epoch": 0.78,
      "learning_rate": 1.0999609996099962e-05,
      "loss": 1.1364,
      "step": 26000
    },
    {
      "epoch": 0.79,
      "learning_rate": 1.0249602496024962e-05,
      "loss": 1.1322,
      "step": 26500
    },
    {
      "epoch": 0.81,
      "learning_rate": 9.49959499594996e-06,
      "loss": 1.1337,
      "step": 27000
    },
    {
      "epoch": 0.82,
      "learning_rate": 8.749587495874959e-06,
      "loss": 1.1322,
      "step": 27500
    },
    {
      "epoch": 0.84,
      "learning_rate": 7.999579995799957e-06,
      "loss": 1.128,
      "step": 28000
    },
    {
      "epoch": 0.85,
      "learning_rate": 7.249572495724958e-06,
      "loss": 1.1318,
      "step": 28500
    },
    {
      "epoch": 0.87,
      "learning_rate": 6.499564995649957e-06,
      "loss": 1.1283,
      "step": 29000
    },
    {
      "epoch": 0.88,
      "learning_rate": 5.749557495574956e-06,
      "loss": 1.1262,
      "step": 29500
    },
    {
      "epoch": 0.9,
      "learning_rate": 4.999549995499956e-06,
      "loss": 1.1295,
      "step": 30000
    },
    {
      "epoch": 0.91,
      "learning_rate": 4.249542495424955e-06,
      "loss": 1.125,
      "step": 30500
    },
    {
      "epoch": 0.93,
      "learning_rate": 3.499534995349954e-06,
      "loss": 1.1241,
      "step": 31000
    },
    {
      "epoch": 0.94,
      "learning_rate": 2.749527495274953e-06,
      "loss": 1.1155,
      "step": 31500
    },
    {
      "epoch": 0.96,
      "learning_rate": 1.999519995199952e-06,
      "loss": 1.1226,
      "step": 32000
    },
    {
      "epoch": 0.97,
      "learning_rate": 1.2495124951249513e-06,
      "loss": 1.1281,
      "step": 32500
    },
    {
      "epoch": 0.99,
      "learning_rate": 4.995049950499505e-07,
      "loss": 1.1218,
      "step": 33000
    },
    {
      "epoch": 1.0,
      "eval_bleu": 33.5469,
      "eval_gen_len": 17.7505,
      "eval_loss": 1.3453229665756226,
      "eval_runtime": 449.9191,
      "eval_samples_per_second": 4.445,
      "step": 33333
    },
    {
      "epoch": 1.0,
      "step": 33333,
      "total_flos": 2.7716100970398106e+18,
      "train_runtime": 119333.5912,
      "train_samples_per_second": 0.279
    }
  ],
  "max_steps": 33333,
  "num_train_epochs": 1,
  "total_flos": 2.7716100970398106e+18,
  "trial_name": null,
  "trial_params": null
}