| { | |
| "best_metric": null, | |
| "best_model_checkpoint": null, | |
| "epoch": 2.0, | |
| "eval_steps": 200, | |
| "global_step": 668, | |
| "is_hyper_param_search": false, | |
| "is_local_process_zero": true, | |
| "is_world_process_zero": true, | |
| "log_history": [ | |
| { | |
| "epoch": 0.0029940119760479044, | |
| "grad_norm": 3.93724794742837, | |
| "learning_rate": 9.999944704978835e-06, | |
| "loss": 0.3856, | |
| "step": 1 | |
| }, | |
| { | |
| "epoch": 0.005988023952095809, | |
| "grad_norm": 2.531423343556236, | |
| "learning_rate": 9.999778821138357e-06, | |
| "loss": 0.317, | |
| "step": 2 | |
| }, | |
| { | |
| "epoch": 0.008982035928143712, | |
| "grad_norm": 1.9182361568955064, | |
| "learning_rate": 9.999502352147583e-06, | |
| "loss": 0.2851, | |
| "step": 3 | |
| }, | |
| { | |
| "epoch": 0.011976047904191617, | |
| "grad_norm": 1.886457878434698, | |
| "learning_rate": 9.999115304121459e-06, | |
| "loss": 0.2473, | |
| "step": 4 | |
| }, | |
| { | |
| "epoch": 0.014970059880239521, | |
| "grad_norm": 1.741650226221636, | |
| "learning_rate": 9.998617685620715e-06, | |
| "loss": 0.2225, | |
| "step": 5 | |
| }, | |
| { | |
| "epoch": 0.017964071856287425, | |
| "grad_norm": 1.5880774433229967, | |
| "learning_rate": 9.998009507651683e-06, | |
| "loss": 0.2053, | |
| "step": 6 | |
| }, | |
| { | |
| "epoch": 0.020958083832335328, | |
| "grad_norm": 1.591338095787635, | |
| "learning_rate": 9.997290783666048e-06, | |
| "loss": 0.23, | |
| "step": 7 | |
| }, | |
| { | |
| "epoch": 0.023952095808383235, | |
| "grad_norm": 1.518819135524427, | |
| "learning_rate": 9.996461529560553e-06, | |
| "loss": 0.2184, | |
| "step": 8 | |
| }, | |
| { | |
| "epoch": 0.02694610778443114, | |
| "grad_norm": 1.5710126060174174, | |
| "learning_rate": 9.995521763676645e-06, | |
| "loss": 0.2491, | |
| "step": 9 | |
| }, | |
| { | |
| "epoch": 0.029940119760479042, | |
| "grad_norm": 1.6242241659954166, | |
| "learning_rate": 9.994471506800078e-06, | |
| "loss": 0.1964, | |
| "step": 10 | |
| }, | |
| { | |
| "epoch": 0.03293413173652695, | |
| "grad_norm": 1.4321532456590191, | |
| "learning_rate": 9.993310782160439e-06, | |
| "loss": 0.2156, | |
| "step": 11 | |
| }, | |
| { | |
| "epoch": 0.03592814371257485, | |
| "grad_norm": 1.6204967851148584, | |
| "learning_rate": 9.992039615430648e-06, | |
| "loss": 0.2098, | |
| "step": 12 | |
| }, | |
| { | |
| "epoch": 0.038922155688622756, | |
| "grad_norm": 1.5629035524873522, | |
| "learning_rate": 9.99065803472638e-06, | |
| "loss": 0.2202, | |
| "step": 13 | |
| }, | |
| { | |
| "epoch": 0.041916167664670656, | |
| "grad_norm": 1.5242699489236073, | |
| "learning_rate": 9.989166070605447e-06, | |
| "loss": 0.2066, | |
| "step": 14 | |
| }, | |
| { | |
| "epoch": 0.04491017964071856, | |
| "grad_norm": 1.5932566916520994, | |
| "learning_rate": 9.98756375606713e-06, | |
| "loss": 0.2017, | |
| "step": 15 | |
| }, | |
| { | |
| "epoch": 0.04790419161676647, | |
| "grad_norm": 1.581692620800958, | |
| "learning_rate": 9.985851126551428e-06, | |
| "loss": 0.2086, | |
| "step": 16 | |
| }, | |
| { | |
| "epoch": 0.05089820359281437, | |
| "grad_norm": 1.3591000958118666, | |
| "learning_rate": 9.9840282199383e-06, | |
| "loss": 0.1737, | |
| "step": 17 | |
| }, | |
| { | |
| "epoch": 0.05389221556886228, | |
| "grad_norm": 1.463270604026187, | |
| "learning_rate": 9.982095076546806e-06, | |
| "loss": 0.2202, | |
| "step": 18 | |
| }, | |
| { | |
| "epoch": 0.05688622754491018, | |
| "grad_norm": 1.8791794702977491, | |
| "learning_rate": 9.980051739134235e-06, | |
| "loss": 0.2357, | |
| "step": 19 | |
| }, | |
| { | |
| "epoch": 0.059880239520958084, | |
| "grad_norm": 1.4101644781632705, | |
| "learning_rate": 9.977898252895133e-06, | |
| "loss": 0.2106, | |
| "step": 20 | |
| }, | |
| { | |
| "epoch": 0.06287425149700598, | |
| "grad_norm": 1.501644136763878, | |
| "learning_rate": 9.975634665460333e-06, | |
| "loss": 0.2107, | |
| "step": 21 | |
| }, | |
| { | |
| "epoch": 0.0658682634730539, | |
| "grad_norm": 1.5630522622264376, | |
| "learning_rate": 9.973261026895878e-06, | |
| "loss": 0.2224, | |
| "step": 22 | |
| }, | |
| { | |
| "epoch": 0.0688622754491018, | |
| "grad_norm": 1.4433503176969218, | |
| "learning_rate": 9.970777389701927e-06, | |
| "loss": 0.2093, | |
| "step": 23 | |
| }, | |
| { | |
| "epoch": 0.0718562874251497, | |
| "grad_norm": 1.3847164167667723, | |
| "learning_rate": 9.968183808811586e-06, | |
| "loss": 0.1882, | |
| "step": 24 | |
| }, | |
| { | |
| "epoch": 0.0748502994011976, | |
| "grad_norm": 1.634172305432252, | |
| "learning_rate": 9.965480341589702e-06, | |
| "loss": 0.2026, | |
| "step": 25 | |
| }, | |
| { | |
| "epoch": 0.07784431137724551, | |
| "grad_norm": 1.6113944724383462, | |
| "learning_rate": 9.962667047831585e-06, | |
| "loss": 0.2205, | |
| "step": 26 | |
| }, | |
| { | |
| "epoch": 0.08083832335329341, | |
| "grad_norm": 1.4868003864651482, | |
| "learning_rate": 9.95974398976169e-06, | |
| "loss": 0.2238, | |
| "step": 27 | |
| }, | |
| { | |
| "epoch": 0.08383233532934131, | |
| "grad_norm": 1.5622527733627718, | |
| "learning_rate": 9.95671123203224e-06, | |
| "loss": 0.2115, | |
| "step": 28 | |
| }, | |
| { | |
| "epoch": 0.08682634730538923, | |
| "grad_norm": 1.5716057922410314, | |
| "learning_rate": 9.953568841721796e-06, | |
| "loss": 0.2009, | |
| "step": 29 | |
| }, | |
| { | |
| "epoch": 0.08982035928143713, | |
| "grad_norm": 1.3409662705936514, | |
| "learning_rate": 9.950316888333775e-06, | |
| "loss": 0.1992, | |
| "step": 30 | |
| }, | |
| { | |
| "epoch": 0.09281437125748503, | |
| "grad_norm": 1.2172235547899086, | |
| "learning_rate": 9.946955443794908e-06, | |
| "loss": 0.1875, | |
| "step": 31 | |
| }, | |
| { | |
| "epoch": 0.09580838323353294, | |
| "grad_norm": 1.5935270433606625, | |
| "learning_rate": 9.943484582453653e-06, | |
| "loss": 0.2125, | |
| "step": 32 | |
| }, | |
| { | |
| "epoch": 0.09880239520958084, | |
| "grad_norm": 1.4872847513219196, | |
| "learning_rate": 9.939904381078553e-06, | |
| "loss": 0.1846, | |
| "step": 33 | |
| }, | |
| { | |
| "epoch": 0.10179640718562874, | |
| "grad_norm": 1.6558253795339608, | |
| "learning_rate": 9.93621491885653e-06, | |
| "loss": 0.2308, | |
| "step": 34 | |
| }, | |
| { | |
| "epoch": 0.10479041916167664, | |
| "grad_norm": 1.4737014330284115, | |
| "learning_rate": 9.932416277391144e-06, | |
| "loss": 0.1852, | |
| "step": 35 | |
| }, | |
| { | |
| "epoch": 0.10778443113772455, | |
| "grad_norm": 1.4638204560891173, | |
| "learning_rate": 9.928508540700775e-06, | |
| "loss": 0.2106, | |
| "step": 36 | |
| }, | |
| { | |
| "epoch": 0.11077844311377245, | |
| "grad_norm": 1.6435758430933987, | |
| "learning_rate": 9.924491795216777e-06, | |
| "loss": 0.2289, | |
| "step": 37 | |
| }, | |
| { | |
| "epoch": 0.11377245508982035, | |
| "grad_norm": 1.4912053018392504, | |
| "learning_rate": 9.920366129781564e-06, | |
| "loss": 0.1955, | |
| "step": 38 | |
| }, | |
| { | |
| "epoch": 0.11676646706586827, | |
| "grad_norm": 1.1676299422758483, | |
| "learning_rate": 9.916131635646635e-06, | |
| "loss": 0.1722, | |
| "step": 39 | |
| }, | |
| { | |
| "epoch": 0.11976047904191617, | |
| "grad_norm": 1.5060030491204137, | |
| "learning_rate": 9.91178840647057e-06, | |
| "loss": 0.2126, | |
| "step": 40 | |
| }, | |
| { | |
| "epoch": 0.12275449101796407, | |
| "grad_norm": 1.3783418806975334, | |
| "learning_rate": 9.907336538316946e-06, | |
| "loss": 0.2019, | |
| "step": 41 | |
| }, | |
| { | |
| "epoch": 0.12574850299401197, | |
| "grad_norm": 1.2220766422750977, | |
| "learning_rate": 9.902776129652223e-06, | |
| "loss": 0.1564, | |
| "step": 42 | |
| }, | |
| { | |
| "epoch": 0.12874251497005987, | |
| "grad_norm": 1.4132213037452304, | |
| "learning_rate": 9.898107281343557e-06, | |
| "loss": 0.1928, | |
| "step": 43 | |
| }, | |
| { | |
| "epoch": 0.1317365269461078, | |
| "grad_norm": 1.3249808392335245, | |
| "learning_rate": 9.893330096656576e-06, | |
| "loss": 0.1649, | |
| "step": 44 | |
| }, | |
| { | |
| "epoch": 0.1347305389221557, | |
| "grad_norm": 1.4277858852491792, | |
| "learning_rate": 9.888444681253087e-06, | |
| "loss": 0.1792, | |
| "step": 45 | |
| }, | |
| { | |
| "epoch": 0.1377245508982036, | |
| "grad_norm": 1.6556419759624716, | |
| "learning_rate": 9.883451143188753e-06, | |
| "loss": 0.2087, | |
| "step": 46 | |
| }, | |
| { | |
| "epoch": 0.1407185628742515, | |
| "grad_norm": 1.4011879303540493, | |
| "learning_rate": 9.878349592910694e-06, | |
| "loss": 0.1729, | |
| "step": 47 | |
| }, | |
| { | |
| "epoch": 0.1437125748502994, | |
| "grad_norm": 1.7563808472743958, | |
| "learning_rate": 9.873140143255035e-06, | |
| "loss": 0.233, | |
| "step": 48 | |
| }, | |
| { | |
| "epoch": 0.1467065868263473, | |
| "grad_norm": 1.501330443218464, | |
| "learning_rate": 9.867822909444435e-06, | |
| "loss": 0.2062, | |
| "step": 49 | |
| }, | |
| { | |
| "epoch": 0.1497005988023952, | |
| "grad_norm": 1.4028671523616945, | |
| "learning_rate": 9.862398009085511e-06, | |
| "loss": 0.1812, | |
| "step": 50 | |
| }, | |
| { | |
| "epoch": 0.15269461077844312, | |
| "grad_norm": 1.4848763193642305, | |
| "learning_rate": 9.856865562166256e-06, | |
| "loss": 0.2011, | |
| "step": 51 | |
| }, | |
| { | |
| "epoch": 0.15568862275449102, | |
| "grad_norm": 1.2935948893917035, | |
| "learning_rate": 9.851225691053382e-06, | |
| "loss": 0.182, | |
| "step": 52 | |
| }, | |
| { | |
| "epoch": 0.15868263473053892, | |
| "grad_norm": 1.5380336864789779, | |
| "learning_rate": 9.8454785204896e-06, | |
| "loss": 0.213, | |
| "step": 53 | |
| }, | |
| { | |
| "epoch": 0.16167664670658682, | |
| "grad_norm": 1.2461757306089647, | |
| "learning_rate": 9.83962417759088e-06, | |
| "loss": 0.1659, | |
| "step": 54 | |
| }, | |
| { | |
| "epoch": 0.16467065868263472, | |
| "grad_norm": 1.4437257194682225, | |
| "learning_rate": 9.833662791843628e-06, | |
| "loss": 0.192, | |
| "step": 55 | |
| }, | |
| { | |
| "epoch": 0.16766467065868262, | |
| "grad_norm": 1.4307135180501935, | |
| "learning_rate": 9.827594495101824e-06, | |
| "loss": 0.1948, | |
| "step": 56 | |
| }, | |
| { | |
| "epoch": 0.17065868263473055, | |
| "grad_norm": 1.5531092685306045, | |
| "learning_rate": 9.821419421584108e-06, | |
| "loss": 0.2068, | |
| "step": 57 | |
| }, | |
| { | |
| "epoch": 0.17365269461077845, | |
| "grad_norm": 1.5872727899490777, | |
| "learning_rate": 9.815137707870806e-06, | |
| "loss": 0.2369, | |
| "step": 58 | |
| }, | |
| { | |
| "epoch": 0.17664670658682635, | |
| "grad_norm": 1.6050171478047066, | |
| "learning_rate": 9.808749492900917e-06, | |
| "loss": 0.1989, | |
| "step": 59 | |
| }, | |
| { | |
| "epoch": 0.17964071856287425, | |
| "grad_norm": 1.523037446145965, | |
| "learning_rate": 9.802254917969033e-06, | |
| "loss": 0.2011, | |
| "step": 60 | |
| }, | |
| { | |
| "epoch": 0.18263473053892215, | |
| "grad_norm": 1.6642985322975656, | |
| "learning_rate": 9.795654126722218e-06, | |
| "loss": 0.1992, | |
| "step": 61 | |
| }, | |
| { | |
| "epoch": 0.18562874251497005, | |
| "grad_norm": 1.4489881866635752, | |
| "learning_rate": 9.788947265156828e-06, | |
| "loss": 0.1797, | |
| "step": 62 | |
| }, | |
| { | |
| "epoch": 0.18862275449101795, | |
| "grad_norm": 1.3602900812884753, | |
| "learning_rate": 9.782134481615282e-06, | |
| "loss": 0.1591, | |
| "step": 63 | |
| }, | |
| { | |
| "epoch": 0.19161676646706588, | |
| "grad_norm": 1.4365332506715853, | |
| "learning_rate": 9.775215926782788e-06, | |
| "loss": 0.2005, | |
| "step": 64 | |
| }, | |
| { | |
| "epoch": 0.19461077844311378, | |
| "grad_norm": 1.5481049862457326, | |
| "learning_rate": 9.768191753683997e-06, | |
| "loss": 0.1961, | |
| "step": 65 | |
| }, | |
| { | |
| "epoch": 0.19760479041916168, | |
| "grad_norm": 1.3673229020435045, | |
| "learning_rate": 9.761062117679632e-06, | |
| "loss": 0.183, | |
| "step": 66 | |
| }, | |
| { | |
| "epoch": 0.20059880239520958, | |
| "grad_norm": 1.390542504175092, | |
| "learning_rate": 9.75382717646304e-06, | |
| "loss": 0.1806, | |
| "step": 67 | |
| }, | |
| { | |
| "epoch": 0.20359281437125748, | |
| "grad_norm": 1.4300330967995394, | |
| "learning_rate": 9.746487090056712e-06, | |
| "loss": 0.1851, | |
| "step": 68 | |
| }, | |
| { | |
| "epoch": 0.20658682634730538, | |
| "grad_norm": 1.2674738573934505, | |
| "learning_rate": 9.739042020808746e-06, | |
| "loss": 0.1657, | |
| "step": 69 | |
| }, | |
| { | |
| "epoch": 0.20958083832335328, | |
| "grad_norm": 1.400884448983278, | |
| "learning_rate": 9.73149213338924e-06, | |
| "loss": 0.1897, | |
| "step": 70 | |
| }, | |
| { | |
| "epoch": 0.2125748502994012, | |
| "grad_norm": 1.5392476669930033, | |
| "learning_rate": 9.72383759478667e-06, | |
| "loss": 0.2179, | |
| "step": 71 | |
| }, | |
| { | |
| "epoch": 0.2155688622754491, | |
| "grad_norm": 1.2936017969279172, | |
| "learning_rate": 9.71607857430419e-06, | |
| "loss": 0.1518, | |
| "step": 72 | |
| }, | |
| { | |
| "epoch": 0.218562874251497, | |
| "grad_norm": 1.5561346225585322, | |
| "learning_rate": 9.708215243555875e-06, | |
| "loss": 0.2181, | |
| "step": 73 | |
| }, | |
| { | |
| "epoch": 0.2215568862275449, | |
| "grad_norm": 1.4228009793706364, | |
| "learning_rate": 9.700247776462944e-06, | |
| "loss": 0.1803, | |
| "step": 74 | |
| }, | |
| { | |
| "epoch": 0.2245508982035928, | |
| "grad_norm": 1.5359748610960158, | |
| "learning_rate": 9.6921763492499e-06, | |
| "loss": 0.1998, | |
| "step": 75 | |
| }, | |
| { | |
| "epoch": 0.2275449101796407, | |
| "grad_norm": 1.2508741160023198, | |
| "learning_rate": 9.68400114044064e-06, | |
| "loss": 0.1583, | |
| "step": 76 | |
| }, | |
| { | |
| "epoch": 0.23053892215568864, | |
| "grad_norm": 1.37666283461974, | |
| "learning_rate": 9.6757223308545e-06, | |
| "loss": 0.1661, | |
| "step": 77 | |
| }, | |
| { | |
| "epoch": 0.23353293413173654, | |
| "grad_norm": 1.4034916271796498, | |
| "learning_rate": 9.667340103602263e-06, | |
| "loss": 0.195, | |
| "step": 78 | |
| }, | |
| { | |
| "epoch": 0.23652694610778444, | |
| "grad_norm": 1.297114622990894, | |
| "learning_rate": 9.658854644082099e-06, | |
| "loss": 0.1739, | |
| "step": 79 | |
| }, | |
| { | |
| "epoch": 0.23952095808383234, | |
| "grad_norm": 1.6568485579280952, | |
| "learning_rate": 9.650266139975474e-06, | |
| "loss": 0.2169, | |
| "step": 80 | |
| }, | |
| { | |
| "epoch": 0.24251497005988024, | |
| "grad_norm": 1.3031702739097326, | |
| "learning_rate": 9.641574781242999e-06, | |
| "loss": 0.1737, | |
| "step": 81 | |
| }, | |
| { | |
| "epoch": 0.24550898203592814, | |
| "grad_norm": 1.3372507967443468, | |
| "learning_rate": 9.632780760120217e-06, | |
| "loss": 0.1613, | |
| "step": 82 | |
| }, | |
| { | |
| "epoch": 0.24850299401197604, | |
| "grad_norm": 1.3086788986597602, | |
| "learning_rate": 9.62388427111336e-06, | |
| "loss": 0.1707, | |
| "step": 83 | |
| }, | |
| { | |
| "epoch": 0.25149700598802394, | |
| "grad_norm": 1.3095520634669926, | |
| "learning_rate": 9.614885510995047e-06, | |
| "loss": 0.1602, | |
| "step": 84 | |
| }, | |
| { | |
| "epoch": 0.25449101796407186, | |
| "grad_norm": 1.16703339010408, | |
| "learning_rate": 9.605784678799934e-06, | |
| "loss": 0.1368, | |
| "step": 85 | |
| }, | |
| { | |
| "epoch": 0.25748502994011974, | |
| "grad_norm": 1.5643809252416627, | |
| "learning_rate": 9.596581975820304e-06, | |
| "loss": 0.171, | |
| "step": 86 | |
| }, | |
| { | |
| "epoch": 0.26047904191616766, | |
| "grad_norm": 1.4447729370847562, | |
| "learning_rate": 9.587277605601617e-06, | |
| "loss": 0.1865, | |
| "step": 87 | |
| }, | |
| { | |
| "epoch": 0.2634730538922156, | |
| "grad_norm": 1.8441944049305472, | |
| "learning_rate": 9.577871773938013e-06, | |
| "loss": 0.2015, | |
| "step": 88 | |
| }, | |
| { | |
| "epoch": 0.26646706586826346, | |
| "grad_norm": 1.5559099207926539, | |
| "learning_rate": 9.568364688867757e-06, | |
| "loss": 0.2216, | |
| "step": 89 | |
| }, | |
| { | |
| "epoch": 0.2694610778443114, | |
| "grad_norm": 1.341445085577381, | |
| "learning_rate": 9.558756560668637e-06, | |
| "loss": 0.1601, | |
| "step": 90 | |
| }, | |
| { | |
| "epoch": 0.27245508982035926, | |
| "grad_norm": 1.3668565270723219, | |
| "learning_rate": 9.549047601853313e-06, | |
| "loss": 0.1745, | |
| "step": 91 | |
| }, | |
| { | |
| "epoch": 0.2754491017964072, | |
| "grad_norm": 1.513308131532101, | |
| "learning_rate": 9.539238027164618e-06, | |
| "loss": 0.1913, | |
| "step": 92 | |
| }, | |
| { | |
| "epoch": 0.27844311377245506, | |
| "grad_norm": 1.373583891627503, | |
| "learning_rate": 9.52932805357081e-06, | |
| "loss": 0.1591, | |
| "step": 93 | |
| }, | |
| { | |
| "epoch": 0.281437125748503, | |
| "grad_norm": 1.4016365555423405, | |
| "learning_rate": 9.519317900260769e-06, | |
| "loss": 0.1805, | |
| "step": 94 | |
| }, | |
| { | |
| "epoch": 0.2844311377245509, | |
| "grad_norm": 1.2523166754790305, | |
| "learning_rate": 9.509207788639148e-06, | |
| "loss": 0.166, | |
| "step": 95 | |
| }, | |
| { | |
| "epoch": 0.2874251497005988, | |
| "grad_norm": 1.2674512425591493, | |
| "learning_rate": 9.498997942321484e-06, | |
| "loss": 0.1852, | |
| "step": 96 | |
| }, | |
| { | |
| "epoch": 0.2904191616766467, | |
| "grad_norm": 1.2171889125564206, | |
| "learning_rate": 9.488688587129243e-06, | |
| "loss": 0.1628, | |
| "step": 97 | |
| }, | |
| { | |
| "epoch": 0.2934131736526946, | |
| "grad_norm": 1.3268937638559497, | |
| "learning_rate": 9.47827995108483e-06, | |
| "loss": 0.1554, | |
| "step": 98 | |
| }, | |
| { | |
| "epoch": 0.2964071856287425, | |
| "grad_norm": 1.5606207091771287, | |
| "learning_rate": 9.467772264406545e-06, | |
| "loss": 0.1948, | |
| "step": 99 | |
| }, | |
| { | |
| "epoch": 0.2994011976047904, | |
| "grad_norm": 1.2968165695379275, | |
| "learning_rate": 9.457165759503492e-06, | |
| "loss": 0.1709, | |
| "step": 100 | |
| }, | |
| { | |
| "epoch": 0.3023952095808383, | |
| "grad_norm": 1.4510166923501213, | |
| "learning_rate": 9.446460670970436e-06, | |
| "loss": 0.1509, | |
| "step": 101 | |
| }, | |
| { | |
| "epoch": 0.30538922155688625, | |
| "grad_norm": 1.2472836591992684, | |
| "learning_rate": 9.435657235582616e-06, | |
| "loss": 0.1439, | |
| "step": 102 | |
| }, | |
| { | |
| "epoch": 0.3083832335329341, | |
| "grad_norm": 1.6351321710185747, | |
| "learning_rate": 9.424755692290507e-06, | |
| "loss": 0.18, | |
| "step": 103 | |
| }, | |
| { | |
| "epoch": 0.31137724550898205, | |
| "grad_norm": 1.3860153284957377, | |
| "learning_rate": 9.413756282214538e-06, | |
| "loss": 0.184, | |
| "step": 104 | |
| }, | |
| { | |
| "epoch": 0.3143712574850299, | |
| "grad_norm": 1.334563801641187, | |
| "learning_rate": 9.402659248639749e-06, | |
| "loss": 0.1755, | |
| "step": 105 | |
| }, | |
| { | |
| "epoch": 0.31736526946107785, | |
| "grad_norm": 1.4476097940115753, | |
| "learning_rate": 9.391464837010428e-06, | |
| "loss": 0.1841, | |
| "step": 106 | |
| }, | |
| { | |
| "epoch": 0.3203592814371258, | |
| "grad_norm": 1.7182430440178245, | |
| "learning_rate": 9.380173294924661e-06, | |
| "loss": 0.2012, | |
| "step": 107 | |
| }, | |
| { | |
| "epoch": 0.32335329341317365, | |
| "grad_norm": 1.5515938836756296, | |
| "learning_rate": 9.368784872128877e-06, | |
| "loss": 0.2056, | |
| "step": 108 | |
| }, | |
| { | |
| "epoch": 0.3263473053892216, | |
| "grad_norm": 1.510626715661151, | |
| "learning_rate": 9.357299820512305e-06, | |
| "loss": 0.1803, | |
| "step": 109 | |
| }, | |
| { | |
| "epoch": 0.32934131736526945, | |
| "grad_norm": 1.4475109881678339, | |
| "learning_rate": 9.345718394101412e-06, | |
| "loss": 0.1903, | |
| "step": 110 | |
| }, | |
| { | |
| "epoch": 0.3323353293413174, | |
| "grad_norm": 1.6454914818008244, | |
| "learning_rate": 9.334040849054288e-06, | |
| "loss": 0.2252, | |
| "step": 111 | |
| }, | |
| { | |
| "epoch": 0.33532934131736525, | |
| "grad_norm": 1.4593993949984219, | |
| "learning_rate": 9.322267443654974e-06, | |
| "loss": 0.1902, | |
| "step": 112 | |
| }, | |
| { | |
| "epoch": 0.3383233532934132, | |
| "grad_norm": 1.5179640054359906, | |
| "learning_rate": 9.310398438307747e-06, | |
| "loss": 0.2123, | |
| "step": 113 | |
| }, | |
| { | |
| "epoch": 0.3413173652694611, | |
| "grad_norm": 1.2929729493471633, | |
| "learning_rate": 9.29843409553137e-06, | |
| "loss": 0.1679, | |
| "step": 114 | |
| }, | |
| { | |
| "epoch": 0.344311377245509, | |
| "grad_norm": 1.3551179495148795, | |
| "learning_rate": 9.286374679953278e-06, | |
| "loss": 0.17, | |
| "step": 115 | |
| }, | |
| { | |
| "epoch": 0.3473053892215569, | |
| "grad_norm": 1.388980396399703, | |
| "learning_rate": 9.274220458303727e-06, | |
| "loss": 0.1658, | |
| "step": 116 | |
| }, | |
| { | |
| "epoch": 0.3502994011976048, | |
| "grad_norm": 1.6332492947640964, | |
| "learning_rate": 9.261971699409893e-06, | |
| "loss": 0.202, | |
| "step": 117 | |
| }, | |
| { | |
| "epoch": 0.3532934131736527, | |
| "grad_norm": 1.5559599715444676, | |
| "learning_rate": 9.249628674189928e-06, | |
| "loss": 0.1886, | |
| "step": 118 | |
| }, | |
| { | |
| "epoch": 0.3562874251497006, | |
| "grad_norm": 1.474411371361195, | |
| "learning_rate": 9.237191655646972e-06, | |
| "loss": 0.1894, | |
| "step": 119 | |
| }, | |
| { | |
| "epoch": 0.3592814371257485, | |
| "grad_norm": 1.5589673655434235, | |
| "learning_rate": 9.224660918863104e-06, | |
| "loss": 0.1726, | |
| "step": 120 | |
| }, | |
| { | |
| "epoch": 0.36227544910179643, | |
| "grad_norm": 1.3759348747522877, | |
| "learning_rate": 9.212036740993265e-06, | |
| "loss": 0.1914, | |
| "step": 121 | |
| }, | |
| { | |
| "epoch": 0.3652694610778443, | |
| "grad_norm": 1.455780095293215, | |
| "learning_rate": 9.199319401259132e-06, | |
| "loss": 0.1687, | |
| "step": 122 | |
| }, | |
| { | |
| "epoch": 0.36826347305389223, | |
| "grad_norm": 1.4247699085203211, | |
| "learning_rate": 9.186509180942928e-06, | |
| "loss": 0.151, | |
| "step": 123 | |
| }, | |
| { | |
| "epoch": 0.3712574850299401, | |
| "grad_norm": 1.4050909160957994, | |
| "learning_rate": 9.173606363381218e-06, | |
| "loss": 0.19, | |
| "step": 124 | |
| }, | |
| { | |
| "epoch": 0.37425149700598803, | |
| "grad_norm": 1.4419847227880427, | |
| "learning_rate": 9.16061123395863e-06, | |
| "loss": 0.1538, | |
| "step": 125 | |
| }, | |
| { | |
| "epoch": 0.3772455089820359, | |
| "grad_norm": 1.4134608914842157, | |
| "learning_rate": 9.147524080101543e-06, | |
| "loss": 0.1678, | |
| "step": 126 | |
| }, | |
| { | |
| "epoch": 0.38023952095808383, | |
| "grad_norm": 1.5383409386271403, | |
| "learning_rate": 9.134345191271742e-06, | |
| "loss": 0.1886, | |
| "step": 127 | |
| }, | |
| { | |
| "epoch": 0.38323353293413176, | |
| "grad_norm": 1.4943615355618307, | |
| "learning_rate": 9.121074858959997e-06, | |
| "loss": 0.2073, | |
| "step": 128 | |
| }, | |
| { | |
| "epoch": 0.38622754491017963, | |
| "grad_norm": 1.4318715263715012, | |
| "learning_rate": 9.107713376679634e-06, | |
| "loss": 0.2013, | |
| "step": 129 | |
| }, | |
| { | |
| "epoch": 0.38922155688622756, | |
| "grad_norm": 1.4111490546296792, | |
| "learning_rate": 9.094261039960028e-06, | |
| "loss": 0.1746, | |
| "step": 130 | |
| }, | |
| { | |
| "epoch": 0.39221556886227543, | |
| "grad_norm": 1.4710309569633124, | |
| "learning_rate": 9.08071814634008e-06, | |
| "loss": 0.19, | |
| "step": 131 | |
| }, | |
| { | |
| "epoch": 0.39520958083832336, | |
| "grad_norm": 1.3689974712125654, | |
| "learning_rate": 9.067084995361623e-06, | |
| "loss": 0.1679, | |
| "step": 132 | |
| }, | |
| { | |
| "epoch": 0.39820359281437123, | |
| "grad_norm": 1.4813036075463237, | |
| "learning_rate": 9.053361888562807e-06, | |
| "loss": 0.2051, | |
| "step": 133 | |
| }, | |
| { | |
| "epoch": 0.40119760479041916, | |
| "grad_norm": 1.3581339528334126, | |
| "learning_rate": 9.039549129471423e-06, | |
| "loss": 0.1712, | |
| "step": 134 | |
| }, | |
| { | |
| "epoch": 0.4041916167664671, | |
| "grad_norm": 1.3412732619656769, | |
| "learning_rate": 9.025647023598196e-06, | |
| "loss": 0.1525, | |
| "step": 135 | |
| }, | |
| { | |
| "epoch": 0.40718562874251496, | |
| "grad_norm": 1.3300419421729832, | |
| "learning_rate": 9.011655878430018e-06, | |
| "loss": 0.1547, | |
| "step": 136 | |
| }, | |
| { | |
| "epoch": 0.4101796407185629, | |
| "grad_norm": 1.7435953827334294, | |
| "learning_rate": 8.99757600342316e-06, | |
| "loss": 0.2199, | |
| "step": 137 | |
| }, | |
| { | |
| "epoch": 0.41317365269461076, | |
| "grad_norm": 1.4035336671598315, | |
| "learning_rate": 8.983407709996415e-06, | |
| "loss": 0.1634, | |
| "step": 138 | |
| }, | |
| { | |
| "epoch": 0.4161676646706587, | |
| "grad_norm": 1.4571937121479426, | |
| "learning_rate": 8.969151311524215e-06, | |
| "loss": 0.1498, | |
| "step": 139 | |
| }, | |
| { | |
| "epoch": 0.41916167664670656, | |
| "grad_norm": 1.469988382357634, | |
| "learning_rate": 8.954807123329703e-06, | |
| "loss": 0.1846, | |
| "step": 140 | |
| }, | |
| { | |
| "epoch": 0.4221556886227545, | |
| "grad_norm": 1.26239912509337, | |
| "learning_rate": 8.940375462677758e-06, | |
| "loss": 0.1731, | |
| "step": 141 | |
| }, | |
| { | |
| "epoch": 0.4251497005988024, | |
| "grad_norm": 1.4250180415951337, | |
| "learning_rate": 8.92585664876797e-06, | |
| "loss": 0.1905, | |
| "step": 142 | |
| }, | |
| { | |
| "epoch": 0.4281437125748503, | |
| "grad_norm": 1.2443460427451063, | |
| "learning_rate": 8.911251002727588e-06, | |
| "loss": 0.1591, | |
| "step": 143 | |
| }, | |
| { | |
| "epoch": 0.4311377245508982, | |
| "grad_norm": 1.2888923009784803, | |
| "learning_rate": 8.896558847604414e-06, | |
| "loss": 0.1669, | |
| "step": 144 | |
| }, | |
| { | |
| "epoch": 0.4341317365269461, | |
| "grad_norm": 1.4655014535926527, | |
| "learning_rate": 8.881780508359661e-06, | |
| "loss": 0.1952, | |
| "step": 145 | |
| }, | |
| { | |
| "epoch": 0.437125748502994, | |
| "grad_norm": 1.469175147623945, | |
| "learning_rate": 8.86691631186076e-06, | |
| "loss": 0.1757, | |
| "step": 146 | |
| }, | |
| { | |
| "epoch": 0.44011976047904194, | |
| "grad_norm": 1.2419437721303557, | |
| "learning_rate": 8.851966586874138e-06, | |
| "loss": 0.1623, | |
| "step": 147 | |
| }, | |
| { | |
| "epoch": 0.4431137724550898, | |
| "grad_norm": 1.470523677240026, | |
| "learning_rate": 8.836931664057935e-06, | |
| "loss": 0.1937, | |
| "step": 148 | |
| }, | |
| { | |
| "epoch": 0.44610778443113774, | |
| "grad_norm": 1.5453356507394016, | |
| "learning_rate": 8.821811875954705e-06, | |
| "loss": 0.1707, | |
| "step": 149 | |
| }, | |
| { | |
| "epoch": 0.4491017964071856, | |
| "grad_norm": 1.3683268469149636, | |
| "learning_rate": 8.806607556984045e-06, | |
| "loss": 0.1885, | |
| "step": 150 | |
| }, | |
| { | |
| "epoch": 0.45209580838323354, | |
| "grad_norm": 1.369697254242337, | |
| "learning_rate": 8.791319043435213e-06, | |
| "loss": 0.1776, | |
| "step": 151 | |
| }, | |
| { | |
| "epoch": 0.4550898203592814, | |
| "grad_norm": 1.520116826707868, | |
| "learning_rate": 8.775946673459682e-06, | |
| "loss": 0.1895, | |
| "step": 152 | |
| }, | |
| { | |
| "epoch": 0.45808383233532934, | |
| "grad_norm": 1.3708697767916063, | |
| "learning_rate": 8.76049078706366e-06, | |
| "loss": 0.1564, | |
| "step": 153 | |
| }, | |
| { | |
| "epoch": 0.46107784431137727, | |
| "grad_norm": 1.3265960371371603, | |
| "learning_rate": 8.744951726100572e-06, | |
| "loss": 0.1613, | |
| "step": 154 | |
| }, | |
| { | |
| "epoch": 0.46407185628742514, | |
| "grad_norm": 1.322830381595716, | |
| "learning_rate": 8.729329834263503e-06, | |
| "loss": 0.1549, | |
| "step": 155 | |
| }, | |
| { | |
| "epoch": 0.46706586826347307, | |
| "grad_norm": 1.5948214340122033, | |
| "learning_rate": 8.713625457077585e-06, | |
| "loss": 0.2, | |
| "step": 156 | |
| }, | |
| { | |
| "epoch": 0.47005988023952094, | |
| "grad_norm": 1.5416993947043487, | |
| "learning_rate": 8.697838941892371e-06, | |
| "loss": 0.1876, | |
| "step": 157 | |
| }, | |
| { | |
| "epoch": 0.47305389221556887, | |
| "grad_norm": 1.305307904681769, | |
| "learning_rate": 8.681970637874131e-06, | |
| "loss": 0.1845, | |
| "step": 158 | |
| }, | |
| { | |
| "epoch": 0.47604790419161674, | |
| "grad_norm": 1.5092289766072327, | |
| "learning_rate": 8.666020895998154e-06, | |
| "loss": 0.1936, | |
| "step": 159 | |
| }, | |
| { | |
| "epoch": 0.47904191616766467, | |
| "grad_norm": 1.331255443997986, | |
| "learning_rate": 8.64999006904096e-06, | |
| "loss": 0.1586, | |
| "step": 160 | |
| }, | |
| { | |
| "epoch": 0.4820359281437126, | |
| "grad_norm": 1.4982725488709654, | |
| "learning_rate": 8.63387851157252e-06, | |
| "loss": 0.1757, | |
| "step": 161 | |
| }, | |
| { | |
| "epoch": 0.48502994011976047, | |
| "grad_norm": 1.2958557167720228, | |
| "learning_rate": 8.617686579948396e-06, | |
| "loss": 0.174, | |
| "step": 162 | |
| }, | |
| { | |
| "epoch": 0.4880239520958084, | |
| "grad_norm": 1.460602213813088, | |
| "learning_rate": 8.60141463230187e-06, | |
| "loss": 0.1721, | |
| "step": 163 | |
| }, | |
| { | |
| "epoch": 0.49101796407185627, | |
| "grad_norm": 1.47738555354595, | |
| "learning_rate": 8.585063028536015e-06, | |
| "loss": 0.1809, | |
| "step": 164 | |
| }, | |
| { | |
| "epoch": 0.4940119760479042, | |
| "grad_norm": 1.5238869378910702, | |
| "learning_rate": 8.568632130315747e-06, | |
| "loss": 0.1958, | |
| "step": 165 | |
| }, | |
| { | |
| "epoch": 0.49700598802395207, | |
| "grad_norm": 1.3046859484332995, | |
| "learning_rate": 8.552122301059807e-06, | |
| "loss": 0.1556, | |
| "step": 166 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "grad_norm": 1.363717813302086, | |
| "learning_rate": 8.535533905932739e-06, | |
| "loss": 0.1482, | |
| "step": 167 | |
| }, | |
| { | |
| "epoch": 0.5029940119760479, | |
| "grad_norm": 1.3906418155881468, | |
| "learning_rate": 8.518867311836808e-06, | |
| "loss": 0.1893, | |
| "step": 168 | |
| }, | |
| { | |
| "epoch": 0.5059880239520959, | |
| "grad_norm": 1.1839732497288527, | |
| "learning_rate": 8.502122887403882e-06, | |
| "loss": 0.134, | |
| "step": 169 | |
| }, | |
| { | |
| "epoch": 0.5089820359281437, | |
| "grad_norm": 1.2712779187017653, | |
| "learning_rate": 8.485301002987285e-06, | |
| "loss": 0.1563, | |
| "step": 170 | |
| }, | |
| { | |
| "epoch": 0.5119760479041916, | |
| "grad_norm": 1.4454743827047176, | |
| "learning_rate": 8.468402030653598e-06, | |
| "loss": 0.1914, | |
| "step": 171 | |
| }, | |
| { | |
| "epoch": 0.5149700598802395, | |
| "grad_norm": 1.477741821887339, | |
| "learning_rate": 8.451426344174433e-06, | |
| "loss": 0.1777, | |
| "step": 172 | |
| }, | |
| { | |
| "epoch": 0.5179640718562875, | |
| "grad_norm": 1.5545425376716913, | |
| "learning_rate": 8.434374319018165e-06, | |
| "loss": 0.1753, | |
| "step": 173 | |
| }, | |
| { | |
| "epoch": 0.5209580838323353, | |
| "grad_norm": 1.4534831560345418, | |
| "learning_rate": 8.417246332341638e-06, | |
| "loss": 0.1644, | |
| "step": 174 | |
| }, | |
| { | |
| "epoch": 0.5239520958083832, | |
| "grad_norm": 1.4588387941625, | |
| "learning_rate": 8.4000427629818e-06, | |
| "loss": 0.1785, | |
| "step": 175 | |
| }, | |
| { | |
| "epoch": 0.5269461077844312, | |
| "grad_norm": 1.3277878892985038, | |
| "learning_rate": 8.382763991447344e-06, | |
| "loss": 0.1662, | |
| "step": 176 | |
| }, | |
| { | |
| "epoch": 0.5299401197604791, | |
| "grad_norm": 1.5610718845048757, | |
| "learning_rate": 8.365410399910287e-06, | |
| "loss": 0.1823, | |
| "step": 177 | |
| }, | |
| { | |
| "epoch": 0.5329341317365269, | |
| "grad_norm": 1.5730107085834315, | |
| "learning_rate": 8.347982372197515e-06, | |
| "loss": 0.1711, | |
| "step": 178 | |
| }, | |
| { | |
| "epoch": 0.5359281437125748, | |
| "grad_norm": 1.4642995069346056, | |
| "learning_rate": 8.33048029378229e-06, | |
| "loss": 0.1747, | |
| "step": 179 | |
| }, | |
| { | |
| "epoch": 0.5389221556886228, | |
| "grad_norm": 1.4679399657123675, | |
| "learning_rate": 8.312904551775731e-06, | |
| "loss": 0.2101, | |
| "step": 180 | |
| }, | |
| { | |
| "epoch": 0.5419161676646707, | |
| "grad_norm": 1.4513506802551264, | |
| "learning_rate": 8.295255534918249e-06, | |
| "loss": 0.2059, | |
| "step": 181 | |
| }, | |
| { | |
| "epoch": 0.5449101796407185, | |
| "grad_norm": 1.5692431171532233, | |
| "learning_rate": 8.277533633570948e-06, | |
| "loss": 0.1814, | |
| "step": 182 | |
| }, | |
| { | |
| "epoch": 0.5479041916167665, | |
| "grad_norm": 1.5753529340132642, | |
| "learning_rate": 8.25973923970699e-06, | |
| "loss": 0.1889, | |
| "step": 183 | |
| }, | |
| { | |
| "epoch": 0.5508982035928144, | |
| "grad_norm": 1.4635429006163088, | |
| "learning_rate": 8.241872746902934e-06, | |
| "loss": 0.1782, | |
| "step": 184 | |
| }, | |
| { | |
| "epoch": 0.5538922155688623, | |
| "grad_norm": 1.4422938004860255, | |
| "learning_rate": 8.223934550330015e-06, | |
| "loss": 0.1876, | |
| "step": 185 | |
| }, | |
| { | |
| "epoch": 0.5568862275449101, | |
| "grad_norm": 1.432566243924132, | |
| "learning_rate": 8.20592504674542e-06, | |
| "loss": 0.1917, | |
| "step": 186 | |
| }, | |
| { | |
| "epoch": 0.5598802395209581, | |
| "grad_norm": 1.7082673510625854, | |
| "learning_rate": 8.187844634483495e-06, | |
| "loss": 0.2195, | |
| "step": 187 | |
| }, | |
| { | |
| "epoch": 0.562874251497006, | |
| "grad_norm": 1.5101649235336667, | |
| "learning_rate": 8.16969371344696e-06, | |
| "loss": 0.1875, | |
| "step": 188 | |
| }, | |
| { | |
| "epoch": 0.5658682634730539, | |
| "grad_norm": 1.3185939977333685, | |
| "learning_rate": 8.151472685098037e-06, | |
| "loss": 0.178, | |
| "step": 189 | |
| }, | |
| { | |
| "epoch": 0.5688622754491018, | |
| "grad_norm": 1.3674337652968656, | |
| "learning_rate": 8.13318195244958e-06, | |
| "loss": 0.1721, | |
| "step": 190 | |
| }, | |
| { | |
| "epoch": 0.5718562874251497, | |
| "grad_norm": 1.2493929955204526, | |
| "learning_rate": 8.114821920056177e-06, | |
| "loss": 0.1677, | |
| "step": 191 | |
| }, | |
| { | |
| "epoch": 0.5748502994011976, | |
| "grad_norm": 1.5578551585851146, | |
| "learning_rate": 8.096392994005177e-06, | |
| "loss": 0.2082, | |
| "step": 192 | |
| }, | |
| { | |
| "epoch": 0.5778443113772455, | |
| "grad_norm": 1.3001308219702912, | |
| "learning_rate": 8.077895581907719e-06, | |
| "loss": 0.176, | |
| "step": 193 | |
| }, | |
| { | |
| "epoch": 0.5808383233532934, | |
| "grad_norm": 1.3679733679039041, | |
| "learning_rate": 8.059330092889724e-06, | |
| "loss": 0.1767, | |
| "step": 194 | |
| }, | |
| { | |
| "epoch": 0.5838323353293413, | |
| "grad_norm": 1.5381087545275656, | |
| "learning_rate": 8.040696937582833e-06, | |
| "loss": 0.1955, | |
| "step": 195 | |
| }, | |
| { | |
| "epoch": 0.5868263473053892, | |
| "grad_norm": 1.3054377743032035, | |
| "learning_rate": 8.021996528115335e-06, | |
| "loss": 0.1475, | |
| "step": 196 | |
| }, | |
| { | |
| "epoch": 0.5898203592814372, | |
| "grad_norm": 1.4499427431352458, | |
| "learning_rate": 8.003229278103044e-06, | |
| "loss": 0.1579, | |
| "step": 197 | |
| }, | |
| { | |
| "epoch": 0.592814371257485, | |
| "grad_norm": 1.423142221539539, | |
| "learning_rate": 7.984395602640153e-06, | |
| "loss": 0.18, | |
| "step": 198 | |
| }, | |
| { | |
| "epoch": 0.5958083832335329, | |
| "grad_norm": 1.295166799716845, | |
| "learning_rate": 7.96549591829006e-06, | |
| "loss": 0.1589, | |
| "step": 199 | |
| }, | |
| { | |
| "epoch": 0.5988023952095808, | |
| "grad_norm": 1.4194083990465078, | |
| "learning_rate": 7.946530643076138e-06, | |
| "loss": 0.1836, | |
| "step": 200 | |
| }, | |
| { | |
| "epoch": 0.5988023952095808, | |
| "eval_loss": 0.19022199511528015, | |
| "eval_runtime": 3.0863, | |
| "eval_samples_per_second": 17.497, | |
| "eval_steps_per_second": 4.536, | |
| "step": 200 | |
| }, | |
| { | |
| "epoch": 0.6017964071856288, | |
| "grad_norm": 1.2328649277653527, | |
| "learning_rate": 7.927500196472506e-06, | |
| "loss": 0.1609, | |
| "step": 201 | |
| }, | |
| { | |
| "epoch": 0.6047904191616766, | |
| "grad_norm": 1.4032760696529674, | |
| "learning_rate": 7.908404999394747e-06, | |
| "loss": 0.1534, | |
| "step": 202 | |
| }, | |
| { | |
| "epoch": 0.6077844311377245, | |
| "grad_norm": 1.582059333177599, | |
| "learning_rate": 7.889245474190588e-06, | |
| "loss": 0.1948, | |
| "step": 203 | |
| }, | |
| { | |
| "epoch": 0.6107784431137725, | |
| "grad_norm": 1.856600541558819, | |
| "learning_rate": 7.870022044630569e-06, | |
| "loss": 0.2173, | |
| "step": 204 | |
| }, | |
| { | |
| "epoch": 0.6137724550898204, | |
| "grad_norm": 1.386590399593482, | |
| "learning_rate": 7.85073513589867e-06, | |
| "loss": 0.1729, | |
| "step": 205 | |
| }, | |
| { | |
| "epoch": 0.6167664670658682, | |
| "grad_norm": 1.4233964807960828, | |
| "learning_rate": 7.831385174582901e-06, | |
| "loss": 0.171, | |
| "step": 206 | |
| }, | |
| { | |
| "epoch": 0.6197604790419161, | |
| "grad_norm": 1.3057913188518013, | |
| "learning_rate": 7.81197258866587e-06, | |
| "loss": 0.1526, | |
| "step": 207 | |
| }, | |
| { | |
| "epoch": 0.6227544910179641, | |
| "grad_norm": 1.3451322518772548, | |
| "learning_rate": 7.792497807515317e-06, | |
| "loss": 0.1642, | |
| "step": 208 | |
| }, | |
| { | |
| "epoch": 0.625748502994012, | |
| "grad_norm": 1.2873603109253433, | |
| "learning_rate": 7.772961261874615e-06, | |
| "loss": 0.166, | |
| "step": 209 | |
| }, | |
| { | |
| "epoch": 0.6287425149700598, | |
| "grad_norm": 1.4922562378746234, | |
| "learning_rate": 7.75336338385325e-06, | |
| "loss": 0.1815, | |
| "step": 210 | |
| }, | |
| { | |
| "epoch": 0.6317365269461078, | |
| "grad_norm": 1.6554968661844722, | |
| "learning_rate": 7.733704606917248e-06, | |
| "loss": 0.2094, | |
| "step": 211 | |
| }, | |
| { | |
| "epoch": 0.6347305389221557, | |
| "grad_norm": 1.3847720668827226, | |
| "learning_rate": 7.713985365879607e-06, | |
| "loss": 0.1622, | |
| "step": 212 | |
| }, | |
| { | |
| "epoch": 0.6377245508982036, | |
| "grad_norm": 1.5696014332781518, | |
| "learning_rate": 7.694206096890667e-06, | |
| "loss": 0.222, | |
| "step": 213 | |
| }, | |
| { | |
| "epoch": 0.6407185628742516, | |
| "grad_norm": 1.1886571735100542, | |
| "learning_rate": 7.674367237428467e-06, | |
| "loss": 0.1527, | |
| "step": 214 | |
| }, | |
| { | |
| "epoch": 0.6437125748502994, | |
| "grad_norm": 1.3338409247430114, | |
| "learning_rate": 7.654469226289068e-06, | |
| "loss": 0.1636, | |
| "step": 215 | |
| }, | |
| { | |
| "epoch": 0.6467065868263473, | |
| "grad_norm": 1.57503867703901, | |
| "learning_rate": 7.63451250357685e-06, | |
| "loss": 0.183, | |
| "step": 216 | |
| }, | |
| { | |
| "epoch": 0.6497005988023952, | |
| "grad_norm": 1.2949691856502272, | |
| "learning_rate": 7.614497510694774e-06, | |
| "loss": 0.1571, | |
| "step": 217 | |
| }, | |
| { | |
| "epoch": 0.6526946107784432, | |
| "grad_norm": 1.4056934428796073, | |
| "learning_rate": 7.5944246903346204e-06, | |
| "loss": 0.17, | |
| "step": 218 | |
| }, | |
| { | |
| "epoch": 0.655688622754491, | |
| "grad_norm": 1.1825913742999679, | |
| "learning_rate": 7.574294486467204e-06, | |
| "loss": 0.1315, | |
| "step": 219 | |
| }, | |
| { | |
| "epoch": 0.6586826347305389, | |
| "grad_norm": 1.3081815970344797, | |
| "learning_rate": 7.55410734433254e-06, | |
| "loss": 0.1549, | |
| "step": 220 | |
| }, | |
| { | |
| "epoch": 0.6616766467065869, | |
| "grad_norm": 1.4823880649757675, | |
| "learning_rate": 7.533863710430011e-06, | |
| "loss": 0.1735, | |
| "step": 221 | |
| }, | |
| { | |
| "epoch": 0.6646706586826348, | |
| "grad_norm": 1.0363374561072316, | |
| "learning_rate": 7.513564032508484e-06, | |
| "loss": 0.1259, | |
| "step": 222 | |
| }, | |
| { | |
| "epoch": 0.6676646706586826, | |
| "grad_norm": 1.6447536973186065, | |
| "learning_rate": 7.493208759556406e-06, | |
| "loss": 0.2007, | |
| "step": 223 | |
| }, | |
| { | |
| "epoch": 0.6706586826347305, | |
| "grad_norm": 1.5348288002068116, | |
| "learning_rate": 7.472798341791877e-06, | |
| "loss": 0.1869, | |
| "step": 224 | |
| }, | |
| { | |
| "epoch": 0.6736526946107785, | |
| "grad_norm": 1.2217971859717047, | |
| "learning_rate": 7.452333230652688e-06, | |
| "loss": 0.1263, | |
| "step": 225 | |
| }, | |
| { | |
| "epoch": 0.6766467065868264, | |
| "grad_norm": 1.4931457975488804, | |
| "learning_rate": 7.431813878786343e-06, | |
| "loss": 0.1926, | |
| "step": 226 | |
| }, | |
| { | |
| "epoch": 0.6796407185628742, | |
| "grad_norm": 1.3106967413812014, | |
| "learning_rate": 7.4112407400400395e-06, | |
| "loss": 0.1316, | |
| "step": 227 | |
| }, | |
| { | |
| "epoch": 0.6826347305389222, | |
| "grad_norm": 1.2753585367796443, | |
| "learning_rate": 7.390614269450633e-06, | |
| "loss": 0.1456, | |
| "step": 228 | |
| }, | |
| { | |
| "epoch": 0.6856287425149701, | |
| "grad_norm": 1.4702650255600365, | |
| "learning_rate": 7.369934923234577e-06, | |
| "loss": 0.1407, | |
| "step": 229 | |
| }, | |
| { | |
| "epoch": 0.688622754491018, | |
| "grad_norm": 1.5458879855102914, | |
| "learning_rate": 7.349203158777826e-06, | |
| "loss": 0.1657, | |
| "step": 230 | |
| }, | |
| { | |
| "epoch": 0.6916167664670658, | |
| "grad_norm": 1.4693035655288298, | |
| "learning_rate": 7.32841943462572e-06, | |
| "loss": 0.162, | |
| "step": 231 | |
| }, | |
| { | |
| "epoch": 0.6946107784431138, | |
| "grad_norm": 1.5342879935057077, | |
| "learning_rate": 7.3075842104728445e-06, | |
| "loss": 0.1796, | |
| "step": 232 | |
| }, | |
| { | |
| "epoch": 0.6976047904191617, | |
| "grad_norm": 1.4003981601830726, | |
| "learning_rate": 7.286697947152868e-06, | |
| "loss": 0.1681, | |
| "step": 233 | |
| }, | |
| { | |
| "epoch": 0.7005988023952096, | |
| "grad_norm": 1.1070003783816433, | |
| "learning_rate": 7.265761106628338e-06, | |
| "loss": 0.1406, | |
| "step": 234 | |
| }, | |
| { | |
| "epoch": 0.7035928143712575, | |
| "grad_norm": 1.3838218846755281, | |
| "learning_rate": 7.244774151980466e-06, | |
| "loss": 0.1627, | |
| "step": 235 | |
| }, | |
| { | |
| "epoch": 0.7065868263473054, | |
| "grad_norm": 1.6258121557824172, | |
| "learning_rate": 7.223737547398898e-06, | |
| "loss": 0.1649, | |
| "step": 236 | |
| }, | |
| { | |
| "epoch": 0.7095808383233533, | |
| "grad_norm": 1.3157529559198688, | |
| "learning_rate": 7.20265175817143e-06, | |
| "loss": 0.1649, | |
| "step": 237 | |
| }, | |
| { | |
| "epoch": 0.7125748502994012, | |
| "grad_norm": 1.6742628322343682, | |
| "learning_rate": 7.181517250673729e-06, | |
| "loss": 0.2161, | |
| "step": 238 | |
| }, | |
| { | |
| "epoch": 0.7155688622754491, | |
| "grad_norm": 1.5254822214664852, | |
| "learning_rate": 7.1603344923590065e-06, | |
| "loss": 0.1702, | |
| "step": 239 | |
| }, | |
| { | |
| "epoch": 0.718562874251497, | |
| "grad_norm": 1.5640449745674354, | |
| "learning_rate": 7.139103951747694e-06, | |
| "loss": 0.207, | |
| "step": 240 | |
| }, | |
| { | |
| "epoch": 0.7215568862275449, | |
| "grad_norm": 1.3441090193696108, | |
| "learning_rate": 7.1178260984170675e-06, | |
| "loss": 0.1637, | |
| "step": 241 | |
| }, | |
| { | |
| "epoch": 0.7245508982035929, | |
| "grad_norm": 1.2993797686631818, | |
| "learning_rate": 7.0965014029908654e-06, | |
| "loss": 0.1499, | |
| "step": 242 | |
| }, | |
| { | |
| "epoch": 0.7275449101796407, | |
| "grad_norm": 1.466941719899587, | |
| "learning_rate": 7.075130337128883e-06, | |
| "loss": 0.1935, | |
| "step": 243 | |
| }, | |
| { | |
| "epoch": 0.7305389221556886, | |
| "grad_norm": 1.5168061797994108, | |
| "learning_rate": 7.053713373516538e-06, | |
| "loss": 0.206, | |
| "step": 244 | |
| }, | |
| { | |
| "epoch": 0.7335329341317365, | |
| "grad_norm": 1.4235802320708153, | |
| "learning_rate": 7.03225098585441e-06, | |
| "loss": 0.1898, | |
| "step": 245 | |
| }, | |
| { | |
| "epoch": 0.7365269461077845, | |
| "grad_norm": 1.5019402258825536, | |
| "learning_rate": 7.0107436488477694e-06, | |
| "loss": 0.2097, | |
| "step": 246 | |
| }, | |
| { | |
| "epoch": 0.7395209580838323, | |
| "grad_norm": 1.489142925529763, | |
| "learning_rate": 6.989191838196083e-06, | |
| "loss": 0.179, | |
| "step": 247 | |
| }, | |
| { | |
| "epoch": 0.7425149700598802, | |
| "grad_norm": 1.2544034598676344, | |
| "learning_rate": 6.9675960305824785e-06, | |
| "loss": 0.1528, | |
| "step": 248 | |
| }, | |
| { | |
| "epoch": 0.7455089820359282, | |
| "grad_norm": 1.5133779038279431, | |
| "learning_rate": 6.945956703663212e-06, | |
| "loss": 0.167, | |
| "step": 249 | |
| }, | |
| { | |
| "epoch": 0.7485029940119761, | |
| "grad_norm": 1.4714784332373052, | |
| "learning_rate": 6.9242743360570985e-06, | |
| "loss": 0.1819, | |
| "step": 250 | |
| }, | |
| { | |
| "epoch": 0.7514970059880239, | |
| "grad_norm": 1.3133942802338159, | |
| "learning_rate": 6.9025494073349284e-06, | |
| "loss": 0.167, | |
| "step": 251 | |
| }, | |
| { | |
| "epoch": 0.7544910179640718, | |
| "grad_norm": 1.3896975867021697, | |
| "learning_rate": 6.880782398008862e-06, | |
| "loss": 0.1823, | |
| "step": 252 | |
| }, | |
| { | |
| "epoch": 0.7574850299401198, | |
| "grad_norm": 1.368434847227758, | |
| "learning_rate": 6.858973789521792e-06, | |
| "loss": 0.151, | |
| "step": 253 | |
| }, | |
| { | |
| "epoch": 0.7604790419161677, | |
| "grad_norm": 1.334293423664, | |
| "learning_rate": 6.837124064236709e-06, | |
| "loss": 0.1559, | |
| "step": 254 | |
| }, | |
| { | |
| "epoch": 0.7634730538922155, | |
| "grad_norm": 1.239297385051478, | |
| "learning_rate": 6.815233705426019e-06, | |
| "loss": 0.1504, | |
| "step": 255 | |
| }, | |
| { | |
| "epoch": 0.7664670658682635, | |
| "grad_norm": 1.303161793239362, | |
| "learning_rate": 6.7933031972608644e-06, | |
| "loss": 0.1491, | |
| "step": 256 | |
| }, | |
| { | |
| "epoch": 0.7694610778443114, | |
| "grad_norm": 1.5387317632461297, | |
| "learning_rate": 6.771333024800411e-06, | |
| "loss": 0.1774, | |
| "step": 257 | |
| }, | |
| { | |
| "epoch": 0.7724550898203593, | |
| "grad_norm": 1.5324704515179566, | |
| "learning_rate": 6.74932367398112e-06, | |
| "loss": 0.1707, | |
| "step": 258 | |
| }, | |
| { | |
| "epoch": 0.7754491017964071, | |
| "grad_norm": 1.1518551255907672, | |
| "learning_rate": 6.727275631605996e-06, | |
| "loss": 0.1291, | |
| "step": 259 | |
| }, | |
| { | |
| "epoch": 0.7784431137724551, | |
| "grad_norm": 1.422833097493017, | |
| "learning_rate": 6.70518938533383e-06, | |
| "loss": 0.1717, | |
| "step": 260 | |
| }, | |
| { | |
| "epoch": 0.781437125748503, | |
| "grad_norm": 1.351548492029586, | |
| "learning_rate": 6.683065423668403e-06, | |
| "loss": 0.1449, | |
| "step": 261 | |
| }, | |
| { | |
| "epoch": 0.7844311377245509, | |
| "grad_norm": 1.4779164247383754, | |
| "learning_rate": 6.660904235947687e-06, | |
| "loss": 0.1692, | |
| "step": 262 | |
| }, | |
| { | |
| "epoch": 0.7874251497005988, | |
| "grad_norm": 1.4716075029844888, | |
| "learning_rate": 6.638706312333018e-06, | |
| "loss": 0.1713, | |
| "step": 263 | |
| }, | |
| { | |
| "epoch": 0.7904191616766467, | |
| "grad_norm": 1.4783765168019238, | |
| "learning_rate": 6.61647214379826e-06, | |
| "loss": 0.1633, | |
| "step": 264 | |
| }, | |
| { | |
| "epoch": 0.7934131736526946, | |
| "grad_norm": 1.693102368663237, | |
| "learning_rate": 6.594202222118941e-06, | |
| "loss": 0.1905, | |
| "step": 265 | |
| }, | |
| { | |
| "epoch": 0.7964071856287425, | |
| "grad_norm": 1.3276274093278135, | |
| "learning_rate": 6.571897039861377e-06, | |
| "loss": 0.1486, | |
| "step": 266 | |
| }, | |
| { | |
| "epoch": 0.7994011976047904, | |
| "grad_norm": 1.4692432546912064, | |
| "learning_rate": 6.549557090371775e-06, | |
| "loss": 0.172, | |
| "step": 267 | |
| }, | |
| { | |
| "epoch": 0.8023952095808383, | |
| "grad_norm": 1.3248953549918894, | |
| "learning_rate": 6.527182867765333e-06, | |
| "loss": 0.14, | |
| "step": 268 | |
| }, | |
| { | |
| "epoch": 0.8053892215568862, | |
| "grad_norm": 1.6796857746587177, | |
| "learning_rate": 6.504774866915291e-06, | |
| "loss": 0.2006, | |
| "step": 269 | |
| }, | |
| { | |
| "epoch": 0.8083832335329342, | |
| "grad_norm": 1.2836143155584496, | |
| "learning_rate": 6.482333583442002e-06, | |
| "loss": 0.1412, | |
| "step": 270 | |
| }, | |
| { | |
| "epoch": 0.811377245508982, | |
| "grad_norm": 1.6099561973224124, | |
| "learning_rate": 6.459859513701967e-06, | |
| "loss": 0.1825, | |
| "step": 271 | |
| }, | |
| { | |
| "epoch": 0.8143712574850299, | |
| "grad_norm": 1.7069354050567391, | |
| "learning_rate": 6.437353154776848e-06, | |
| "loss": 0.1751, | |
| "step": 272 | |
| }, | |
| { | |
| "epoch": 0.8173652694610778, | |
| "grad_norm": 1.4136749694753044, | |
| "learning_rate": 6.414815004462483e-06, | |
| "loss": 0.1422, | |
| "step": 273 | |
| }, | |
| { | |
| "epoch": 0.8203592814371258, | |
| "grad_norm": 1.508934606401253, | |
| "learning_rate": 6.3922455612578715e-06, | |
| "loss": 0.1956, | |
| "step": 274 | |
| }, | |
| { | |
| "epoch": 0.8233532934131736, | |
| "grad_norm": 1.3545270759779238, | |
| "learning_rate": 6.369645324354149e-06, | |
| "loss": 0.1477, | |
| "step": 275 | |
| }, | |
| { | |
| "epoch": 0.8263473053892215, | |
| "grad_norm": 1.440859298657408, | |
| "learning_rate": 6.3470147936235485e-06, | |
| "loss": 0.1919, | |
| "step": 276 | |
| }, | |
| { | |
| "epoch": 0.8293413173652695, | |
| "grad_norm": 1.0894123552900015, | |
| "learning_rate": 6.3243544696083355e-06, | |
| "loss": 0.1462, | |
| "step": 277 | |
| }, | |
| { | |
| "epoch": 0.8323353293413174, | |
| "grad_norm": 1.4958591909013434, | |
| "learning_rate": 6.301664853509755e-06, | |
| "loss": 0.176, | |
| "step": 278 | |
| }, | |
| { | |
| "epoch": 0.8353293413173652, | |
| "grad_norm": 1.4165981518727009, | |
| "learning_rate": 6.278946447176924e-06, | |
| "loss": 0.1343, | |
| "step": 279 | |
| }, | |
| { | |
| "epoch": 0.8383233532934131, | |
| "grad_norm": 1.40156916614215, | |
| "learning_rate": 6.256199753095745e-06, | |
| "loss": 0.1739, | |
| "step": 280 | |
| }, | |
| { | |
| "epoch": 0.8413173652694611, | |
| "grad_norm": 1.270425917802996, | |
| "learning_rate": 6.233425274377793e-06, | |
| "loss": 0.152, | |
| "step": 281 | |
| }, | |
| { | |
| "epoch": 0.844311377245509, | |
| "grad_norm": 1.2970746859582205, | |
| "learning_rate": 6.21062351474918e-06, | |
| "loss": 0.1503, | |
| "step": 282 | |
| }, | |
| { | |
| "epoch": 0.8473053892215568, | |
| "grad_norm": 1.2728542714405398, | |
| "learning_rate": 6.18779497853942e-06, | |
| "loss": 0.1643, | |
| "step": 283 | |
| }, | |
| { | |
| "epoch": 0.8502994011976048, | |
| "grad_norm": 1.3622674275286217, | |
| "learning_rate": 6.164940170670266e-06, | |
| "loss": 0.1801, | |
| "step": 284 | |
| }, | |
| { | |
| "epoch": 0.8532934131736527, | |
| "grad_norm": 1.3361260704292115, | |
| "learning_rate": 6.142059596644557e-06, | |
| "loss": 0.1557, | |
| "step": 285 | |
| }, | |
| { | |
| "epoch": 0.8562874251497006, | |
| "grad_norm": 1.29598026638277, | |
| "learning_rate": 6.11915376253502e-06, | |
| "loss": 0.1358, | |
| "step": 286 | |
| }, | |
| { | |
| "epoch": 0.8592814371257484, | |
| "grad_norm": 1.2482426112862781, | |
| "learning_rate": 6.096223174973091e-06, | |
| "loss": 0.1481, | |
| "step": 287 | |
| }, | |
| { | |
| "epoch": 0.8622754491017964, | |
| "grad_norm": 1.4101053105247794, | |
| "learning_rate": 6.073268341137694e-06, | |
| "loss": 0.1427, | |
| "step": 288 | |
| }, | |
| { | |
| "epoch": 0.8652694610778443, | |
| "grad_norm": 1.364788542873011, | |
| "learning_rate": 6.050289768744042e-06, | |
| "loss": 0.147, | |
| "step": 289 | |
| }, | |
| { | |
| "epoch": 0.8682634730538922, | |
| "grad_norm": 1.382884855635359, | |
| "learning_rate": 6.0272879660323936e-06, | |
| "loss": 0.1678, | |
| "step": 290 | |
| }, | |
| { | |
| "epoch": 0.8712574850299402, | |
| "grad_norm": 1.471513901724229, | |
| "learning_rate": 6.004263441756815e-06, | |
| "loss": 0.1659, | |
| "step": 291 | |
| }, | |
| { | |
| "epoch": 0.874251497005988, | |
| "grad_norm": 1.4770682919421774, | |
| "learning_rate": 5.98121670517393e-06, | |
| "loss": 0.1822, | |
| "step": 292 | |
| }, | |
| { | |
| "epoch": 0.8772455089820359, | |
| "grad_norm": 1.3162666443860314, | |
| "learning_rate": 5.958148266031654e-06, | |
| "loss": 0.1642, | |
| "step": 293 | |
| }, | |
| { | |
| "epoch": 0.8802395209580839, | |
| "grad_norm": 1.2741438126797262, | |
| "learning_rate": 5.935058634557917e-06, | |
| "loss": 0.1466, | |
| "step": 294 | |
| }, | |
| { | |
| "epoch": 0.8832335329341318, | |
| "grad_norm": 1.3756217086358093, | |
| "learning_rate": 5.911948321449384e-06, | |
| "loss": 0.1564, | |
| "step": 295 | |
| }, | |
| { | |
| "epoch": 0.8862275449101796, | |
| "grad_norm": 1.5905463029202878, | |
| "learning_rate": 5.8888178378601565e-06, | |
| "loss": 0.178, | |
| "step": 296 | |
| }, | |
| { | |
| "epoch": 0.8892215568862275, | |
| "grad_norm": 1.3794592396357657, | |
| "learning_rate": 5.865667695390468e-06, | |
| "loss": 0.1665, | |
| "step": 297 | |
| }, | |
| { | |
| "epoch": 0.8922155688622755, | |
| "grad_norm": 1.1832311575539562, | |
| "learning_rate": 5.842498406075363e-06, | |
| "loss": 0.1288, | |
| "step": 298 | |
| }, | |
| { | |
| "epoch": 0.8952095808383234, | |
| "grad_norm": 1.2926099770062578, | |
| "learning_rate": 5.819310482373381e-06, | |
| "loss": 0.1257, | |
| "step": 299 | |
| }, | |
| { | |
| "epoch": 0.8982035928143712, | |
| "grad_norm": 1.4748316249775528, | |
| "learning_rate": 5.796104437155213e-06, | |
| "loss": 0.1692, | |
| "step": 300 | |
| }, | |
| { | |
| "epoch": 0.9011976047904192, | |
| "grad_norm": 1.3016038482111374, | |
| "learning_rate": 5.772880783692363e-06, | |
| "loss": 0.1405, | |
| "step": 301 | |
| }, | |
| { | |
| "epoch": 0.9041916167664671, | |
| "grad_norm": 1.4111179472892534, | |
| "learning_rate": 5.749640035645798e-06, | |
| "loss": 0.1627, | |
| "step": 302 | |
| }, | |
| { | |
| "epoch": 0.907185628742515, | |
| "grad_norm": 1.451587979671474, | |
| "learning_rate": 5.726382707054578e-06, | |
| "loss": 0.1355, | |
| "step": 303 | |
| }, | |
| { | |
| "epoch": 0.9101796407185628, | |
| "grad_norm": 1.429976389437299, | |
| "learning_rate": 5.703109312324493e-06, | |
| "loss": 0.1616, | |
| "step": 304 | |
| }, | |
| { | |
| "epoch": 0.9131736526946108, | |
| "grad_norm": 1.3685075412490366, | |
| "learning_rate": 5.679820366216684e-06, | |
| "loss": 0.181, | |
| "step": 305 | |
| }, | |
| { | |
| "epoch": 0.9161676646706587, | |
| "grad_norm": 1.1837059567057446, | |
| "learning_rate": 5.656516383836263e-06, | |
| "loss": 0.1187, | |
| "step": 306 | |
| }, | |
| { | |
| "epoch": 0.9191616766467066, | |
| "grad_norm": 1.4335788958594091, | |
| "learning_rate": 5.6331978806209044e-06, | |
| "loss": 0.1851, | |
| "step": 307 | |
| }, | |
| { | |
| "epoch": 0.9221556886227545, | |
| "grad_norm": 1.4336240146169037, | |
| "learning_rate": 5.609865372329461e-06, | |
| "loss": 0.1578, | |
| "step": 308 | |
| }, | |
| { | |
| "epoch": 0.9251497005988024, | |
| "grad_norm": 1.403189027128548, | |
| "learning_rate": 5.586519375030549e-06, | |
| "loss": 0.1714, | |
| "step": 309 | |
| }, | |
| { | |
| "epoch": 0.9281437125748503, | |
| "grad_norm": 1.4406850814797973, | |
| "learning_rate": 5.5631604050911354e-06, | |
| "loss": 0.1656, | |
| "step": 310 | |
| }, | |
| { | |
| "epoch": 0.9311377245508982, | |
| "grad_norm": 1.341163509759973, | |
| "learning_rate": 5.539788979165115e-06, | |
| "loss": 0.16, | |
| "step": 311 | |
| }, | |
| { | |
| "epoch": 0.9341317365269461, | |
| "grad_norm": 1.2020643014821206, | |
| "learning_rate": 5.516405614181883e-06, | |
| "loss": 0.1488, | |
| "step": 312 | |
| }, | |
| { | |
| "epoch": 0.937125748502994, | |
| "grad_norm": 1.176455200381604, | |
| "learning_rate": 5.4930108273349034e-06, | |
| "loss": 0.1346, | |
| "step": 313 | |
| }, | |
| { | |
| "epoch": 0.9401197604790419, | |
| "grad_norm": 1.3118580851351633, | |
| "learning_rate": 5.4696051360702725e-06, | |
| "loss": 0.1514, | |
| "step": 314 | |
| }, | |
| { | |
| "epoch": 0.9431137724550899, | |
| "grad_norm": 1.5281399531855036, | |
| "learning_rate": 5.446189058075265e-06, | |
| "loss": 0.1823, | |
| "step": 315 | |
| }, | |
| { | |
| "epoch": 0.9461077844311377, | |
| "grad_norm": 1.6210957919180358, | |
| "learning_rate": 5.4227631112668955e-06, | |
| "loss": 0.1978, | |
| "step": 316 | |
| }, | |
| { | |
| "epoch": 0.9491017964071856, | |
| "grad_norm": 1.2790753507784585, | |
| "learning_rate": 5.39932781378045e-06, | |
| "loss": 0.14, | |
| "step": 317 | |
| }, | |
| { | |
| "epoch": 0.9520958083832335, | |
| "grad_norm": 1.5042419713466963, | |
| "learning_rate": 5.375883683958041e-06, | |
| "loss": 0.1738, | |
| "step": 318 | |
| }, | |
| { | |
| "epoch": 0.9550898203592815, | |
| "grad_norm": 1.3762890941997876, | |
| "learning_rate": 5.3524312403371255e-06, | |
| "loss": 0.1442, | |
| "step": 319 | |
| }, | |
| { | |
| "epoch": 0.9580838323353293, | |
| "grad_norm": 1.4792595383320328, | |
| "learning_rate": 5.328971001639054e-06, | |
| "loss": 0.1506, | |
| "step": 320 | |
| }, | |
| { | |
| "epoch": 0.9610778443113772, | |
| "grad_norm": 1.713286556158787, | |
| "learning_rate": 5.3055034867575825e-06, | |
| "loss": 0.1868, | |
| "step": 321 | |
| }, | |
| { | |
| "epoch": 0.9640718562874252, | |
| "grad_norm": 1.5163942554527314, | |
| "learning_rate": 5.282029214747404e-06, | |
| "loss": 0.1838, | |
| "step": 322 | |
| }, | |
| { | |
| "epoch": 0.9670658682634731, | |
| "grad_norm": 1.4150676288457729, | |
| "learning_rate": 5.258548704812667e-06, | |
| "loss": 0.1605, | |
| "step": 323 | |
| }, | |
| { | |
| "epoch": 0.9700598802395209, | |
| "grad_norm": 1.5572217585129282, | |
| "learning_rate": 5.235062476295488e-06, | |
| "loss": 0.1715, | |
| "step": 324 | |
| }, | |
| { | |
| "epoch": 0.9730538922155688, | |
| "grad_norm": 1.459603229024774, | |
| "learning_rate": 5.211571048664469e-06, | |
| "loss": 0.1552, | |
| "step": 325 | |
| }, | |
| { | |
| "epoch": 0.9760479041916168, | |
| "grad_norm": 1.2677192405836033, | |
| "learning_rate": 5.188074941503203e-06, | |
| "loss": 0.1587, | |
| "step": 326 | |
| }, | |
| { | |
| "epoch": 0.9790419161676647, | |
| "grad_norm": 1.5973282195337155, | |
| "learning_rate": 5.164574674498788e-06, | |
| "loss": 0.1775, | |
| "step": 327 | |
| }, | |
| { | |
| "epoch": 0.9820359281437125, | |
| "grad_norm": 1.3593203968730776, | |
| "learning_rate": 5.141070767430331e-06, | |
| "loss": 0.1519, | |
| "step": 328 | |
| }, | |
| { | |
| "epoch": 0.9850299401197605, | |
| "grad_norm": 1.3410983074327405, | |
| "learning_rate": 5.117563740157444e-06, | |
| "loss": 0.1526, | |
| "step": 329 | |
| }, | |
| { | |
| "epoch": 0.9880239520958084, | |
| "grad_norm": 1.4584326893416701, | |
| "learning_rate": 5.094054112608758e-06, | |
| "loss": 0.163, | |
| "step": 330 | |
| }, | |
| { | |
| "epoch": 0.9910179640718563, | |
| "grad_norm": 1.3915031715443078, | |
| "learning_rate": 5.070542404770413e-06, | |
| "loss": 0.166, | |
| "step": 331 | |
| }, | |
| { | |
| "epoch": 0.9940119760479041, | |
| "grad_norm": 1.1490530041387048, | |
| "learning_rate": 5.047029136674563e-06, | |
| "loss": 0.1341, | |
| "step": 332 | |
| }, | |
| { | |
| "epoch": 0.9970059880239521, | |
| "grad_norm": 1.744421245230071, | |
| "learning_rate": 5.023514828387868e-06, | |
| "loss": 0.1941, | |
| "step": 333 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "grad_norm": 1.1302029456919445, | |
| "learning_rate": 5e-06, | |
| "loss": 0.1062, | |
| "step": 334 | |
| }, | |
| { | |
| "epoch": 1.0029940119760479, | |
| "grad_norm": 1.0301411958153148, | |
| "learning_rate": 4.976485171612134e-06, | |
| "loss": 0.0705, | |
| "step": 335 | |
| }, | |
| { | |
| "epoch": 1.0059880239520957, | |
| "grad_norm": 0.9534144165870336, | |
| "learning_rate": 4.95297086332544e-06, | |
| "loss": 0.0784, | |
| "step": 336 | |
| }, | |
| { | |
| "epoch": 1.0089820359281436, | |
| "grad_norm": 0.8875517634859607, | |
| "learning_rate": 4.9294575952295896e-06, | |
| "loss": 0.0625, | |
| "step": 337 | |
| }, | |
| { | |
| "epoch": 1.0119760479041917, | |
| "grad_norm": 1.0497959751051233, | |
| "learning_rate": 4.905945887391242e-06, | |
| "loss": 0.0706, | |
| "step": 338 | |
| }, | |
| { | |
| "epoch": 1.0149700598802396, | |
| "grad_norm": 1.0003404866627121, | |
| "learning_rate": 4.882436259842556e-06, | |
| "loss": 0.059, | |
| "step": 339 | |
| }, | |
| { | |
| "epoch": 1.0179640718562875, | |
| "grad_norm": 0.9901367775352244, | |
| "learning_rate": 4.858929232569671e-06, | |
| "loss": 0.06, | |
| "step": 340 | |
| }, | |
| { | |
| "epoch": 1.0209580838323353, | |
| "grad_norm": 1.1938074457024948, | |
| "learning_rate": 4.835425325501214e-06, | |
| "loss": 0.0814, | |
| "step": 341 | |
| }, | |
| { | |
| "epoch": 1.0239520958083832, | |
| "grad_norm": 1.13110594829736, | |
| "learning_rate": 4.811925058496799e-06, | |
| "loss": 0.0636, | |
| "step": 342 | |
| }, | |
| { | |
| "epoch": 1.026946107784431, | |
| "grad_norm": 1.1384847052722267, | |
| "learning_rate": 4.788428951335534e-06, | |
| "loss": 0.0643, | |
| "step": 343 | |
| }, | |
| { | |
| "epoch": 1.029940119760479, | |
| "grad_norm": 1.0626783005270215, | |
| "learning_rate": 4.7649375237045135e-06, | |
| "loss": 0.0614, | |
| "step": 344 | |
| }, | |
| { | |
| "epoch": 1.032934131736527, | |
| "grad_norm": 1.0005997667497095, | |
| "learning_rate": 4.741451295187333e-06, | |
| "loss": 0.049, | |
| "step": 345 | |
| }, | |
| { | |
| "epoch": 1.035928143712575, | |
| "grad_norm": 1.3742897362997926, | |
| "learning_rate": 4.717970785252596e-06, | |
| "loss": 0.0618, | |
| "step": 346 | |
| }, | |
| { | |
| "epoch": 1.0389221556886228, | |
| "grad_norm": 1.495868364429878, | |
| "learning_rate": 4.694496513242418e-06, | |
| "loss": 0.0678, | |
| "step": 347 | |
| }, | |
| { | |
| "epoch": 1.0419161676646707, | |
| "grad_norm": 1.320005797709433, | |
| "learning_rate": 4.671028998360947e-06, | |
| "loss": 0.0569, | |
| "step": 348 | |
| }, | |
| { | |
| "epoch": 1.0449101796407185, | |
| "grad_norm": 1.3309259857792877, | |
| "learning_rate": 4.647568759662876e-06, | |
| "loss": 0.0558, | |
| "step": 349 | |
| }, | |
| { | |
| "epoch": 1.0479041916167664, | |
| "grad_norm": 1.431807315363939, | |
| "learning_rate": 4.624116316041962e-06, | |
| "loss": 0.0615, | |
| "step": 350 | |
| }, | |
| { | |
| "epoch": 1.0508982035928143, | |
| "grad_norm": 1.3044460557858018, | |
| "learning_rate": 4.600672186219551e-06, | |
| "loss": 0.0546, | |
| "step": 351 | |
| }, | |
| { | |
| "epoch": 1.0538922155688624, | |
| "grad_norm": 1.8898062153241388, | |
| "learning_rate": 4.5772368887331044e-06, | |
| "loss": 0.0586, | |
| "step": 352 | |
| }, | |
| { | |
| "epoch": 1.0568862275449102, | |
| "grad_norm": 1.6149105743087797, | |
| "learning_rate": 4.553810941924735e-06, | |
| "loss": 0.0711, | |
| "step": 353 | |
| }, | |
| { | |
| "epoch": 1.0598802395209581, | |
| "grad_norm": 1.4435295499276675, | |
| "learning_rate": 4.530394863929728e-06, | |
| "loss": 0.0641, | |
| "step": 354 | |
| }, | |
| { | |
| "epoch": 1.062874251497006, | |
| "grad_norm": 1.477570139346285, | |
| "learning_rate": 4.506989172665097e-06, | |
| "loss": 0.0665, | |
| "step": 355 | |
| }, | |
| { | |
| "epoch": 1.0658682634730539, | |
| "grad_norm": 1.5024359471219393, | |
| "learning_rate": 4.483594385818119e-06, | |
| "loss": 0.0597, | |
| "step": 356 | |
| }, | |
| { | |
| "epoch": 1.0688622754491017, | |
| "grad_norm": 1.669425769841117, | |
| "learning_rate": 4.460211020834887e-06, | |
| "loss": 0.0718, | |
| "step": 357 | |
| }, | |
| { | |
| "epoch": 1.0718562874251496, | |
| "grad_norm": 1.542829376459333, | |
| "learning_rate": 4.436839594908866e-06, | |
| "loss": 0.0618, | |
| "step": 358 | |
| }, | |
| { | |
| "epoch": 1.0748502994011977, | |
| "grad_norm": 1.5108915413273982, | |
| "learning_rate": 4.4134806249694514e-06, | |
| "loss": 0.0566, | |
| "step": 359 | |
| }, | |
| { | |
| "epoch": 1.0778443113772456, | |
| "grad_norm": 1.7289620256406486, | |
| "learning_rate": 4.39013462767054e-06, | |
| "loss": 0.0792, | |
| "step": 360 | |
| }, | |
| { | |
| "epoch": 1.0808383233532934, | |
| "grad_norm": 1.268979122583932, | |
| "learning_rate": 4.366802119379098e-06, | |
| "loss": 0.0571, | |
| "step": 361 | |
| }, | |
| { | |
| "epoch": 1.0838323353293413, | |
| "grad_norm": 1.3831378340103766, | |
| "learning_rate": 4.34348361616374e-06, | |
| "loss": 0.0636, | |
| "step": 362 | |
| }, | |
| { | |
| "epoch": 1.0868263473053892, | |
| "grad_norm": 1.5294459766328508, | |
| "learning_rate": 4.3201796337833165e-06, | |
| "loss": 0.0624, | |
| "step": 363 | |
| }, | |
| { | |
| "epoch": 1.089820359281437, | |
| "grad_norm": 1.2580753718523576, | |
| "learning_rate": 4.29689068767551e-06, | |
| "loss": 0.0558, | |
| "step": 364 | |
| }, | |
| { | |
| "epoch": 1.092814371257485, | |
| "grad_norm": 1.206809516428362, | |
| "learning_rate": 4.273617292945425e-06, | |
| "loss": 0.0666, | |
| "step": 365 | |
| }, | |
| { | |
| "epoch": 1.095808383233533, | |
| "grad_norm": 1.1776036333029534, | |
| "learning_rate": 4.250359964354203e-06, | |
| "loss": 0.0557, | |
| "step": 366 | |
| }, | |
| { | |
| "epoch": 1.098802395209581, | |
| "grad_norm": 1.3618726571615478, | |
| "learning_rate": 4.227119216307637e-06, | |
| "loss": 0.0566, | |
| "step": 367 | |
| }, | |
| { | |
| "epoch": 1.1017964071856288, | |
| "grad_norm": 1.2329560075406734, | |
| "learning_rate": 4.203895562844789e-06, | |
| "loss": 0.0597, | |
| "step": 368 | |
| }, | |
| { | |
| "epoch": 1.1047904191616766, | |
| "grad_norm": 1.272817504045309, | |
| "learning_rate": 4.18068951762662e-06, | |
| "loss": 0.0546, | |
| "step": 369 | |
| }, | |
| { | |
| "epoch": 1.1077844311377245, | |
| "grad_norm": 1.1719459610641785, | |
| "learning_rate": 4.157501593924638e-06, | |
| "loss": 0.0671, | |
| "step": 370 | |
| }, | |
| { | |
| "epoch": 1.1107784431137724, | |
| "grad_norm": 1.3847006139716986, | |
| "learning_rate": 4.134332304609533e-06, | |
| "loss": 0.0768, | |
| "step": 371 | |
| }, | |
| { | |
| "epoch": 1.1137724550898203, | |
| "grad_norm": 1.326362163282319, | |
| "learning_rate": 4.111182162139844e-06, | |
| "loss": 0.0572, | |
| "step": 372 | |
| }, | |
| { | |
| "epoch": 1.1167664670658684, | |
| "grad_norm": 1.2092642186155682, | |
| "learning_rate": 4.088051678550617e-06, | |
| "loss": 0.0544, | |
| "step": 373 | |
| }, | |
| { | |
| "epoch": 1.1197604790419162, | |
| "grad_norm": 1.083711366357108, | |
| "learning_rate": 4.064941365442084e-06, | |
| "loss": 0.0556, | |
| "step": 374 | |
| }, | |
| { | |
| "epoch": 1.122754491017964, | |
| "grad_norm": 1.5349371279443185, | |
| "learning_rate": 4.041851733968348e-06, | |
| "loss": 0.0762, | |
| "step": 375 | |
| }, | |
| { | |
| "epoch": 1.125748502994012, | |
| "grad_norm": 1.3178355000770563, | |
| "learning_rate": 4.018783294826071e-06, | |
| "loss": 0.0677, | |
| "step": 376 | |
| }, | |
| { | |
| "epoch": 1.1287425149700598, | |
| "grad_norm": 1.6286157469829714, | |
| "learning_rate": 3.995736558243186e-06, | |
| "loss": 0.0717, | |
| "step": 377 | |
| }, | |
| { | |
| "epoch": 1.1317365269461077, | |
| "grad_norm": 1.348101304530768, | |
| "learning_rate": 3.972712033967608e-06, | |
| "loss": 0.0688, | |
| "step": 378 | |
| }, | |
| { | |
| "epoch": 1.1347305389221556, | |
| "grad_norm": 1.2286276308032429, | |
| "learning_rate": 3.949710231255961e-06, | |
| "loss": 0.0689, | |
| "step": 379 | |
| }, | |
| { | |
| "epoch": 1.1377245508982037, | |
| "grad_norm": 1.211621227591097, | |
| "learning_rate": 3.926731658862307e-06, | |
| "loss": 0.0566, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 1.1407185628742516, | |
| "grad_norm": 1.1654042588335953, | |
| "learning_rate": 3.903776825026912e-06, | |
| "loss": 0.0563, | |
| "step": 381 | |
| }, | |
| { | |
| "epoch": 1.1437125748502994, | |
| "grad_norm": 1.3582181489841851, | |
| "learning_rate": 3.8808462374649805e-06, | |
| "loss": 0.0721, | |
| "step": 382 | |
| }, | |
| { | |
| "epoch": 1.1467065868263473, | |
| "grad_norm": 1.1199386513545064, | |
| "learning_rate": 3.857940403355444e-06, | |
| "loss": 0.0563, | |
| "step": 383 | |
| }, | |
| { | |
| "epoch": 1.1497005988023952, | |
| "grad_norm": 1.220564577027249, | |
| "learning_rate": 3.8350598293297345e-06, | |
| "loss": 0.0687, | |
| "step": 384 | |
| }, | |
| { | |
| "epoch": 1.152694610778443, | |
| "grad_norm": 1.3435969691437664, | |
| "learning_rate": 3.8122050214605822e-06, | |
| "loss": 0.0617, | |
| "step": 385 | |
| }, | |
| { | |
| "epoch": 1.1556886227544911, | |
| "grad_norm": 1.42145262643414, | |
| "learning_rate": 3.7893764852508207e-06, | |
| "loss": 0.0797, | |
| "step": 386 | |
| }, | |
| { | |
| "epoch": 1.158682634730539, | |
| "grad_norm": 1.0967161281247078, | |
| "learning_rate": 3.766574725622208e-06, | |
| "loss": 0.0491, | |
| "step": 387 | |
| }, | |
| { | |
| "epoch": 1.1616766467065869, | |
| "grad_norm": 1.0536310088855143, | |
| "learning_rate": 3.7438002469042567e-06, | |
| "loss": 0.0452, | |
| "step": 388 | |
| }, | |
| { | |
| "epoch": 1.1646706586826348, | |
| "grad_norm": 1.3338555670117132, | |
| "learning_rate": 3.721053552823078e-06, | |
| "loss": 0.0662, | |
| "step": 389 | |
| }, | |
| { | |
| "epoch": 1.1676646706586826, | |
| "grad_norm": 1.2183458484076521, | |
| "learning_rate": 3.698335146490246e-06, | |
| "loss": 0.0588, | |
| "step": 390 | |
| }, | |
| { | |
| "epoch": 1.1706586826347305, | |
| "grad_norm": 1.372733216321418, | |
| "learning_rate": 3.675645530391665e-06, | |
| "loss": 0.0579, | |
| "step": 391 | |
| }, | |
| { | |
| "epoch": 1.1736526946107784, | |
| "grad_norm": 1.3317219954849193, | |
| "learning_rate": 3.652985206376455e-06, | |
| "loss": 0.0564, | |
| "step": 392 | |
| }, | |
| { | |
| "epoch": 1.1766467065868262, | |
| "grad_norm": 1.3057682597058806, | |
| "learning_rate": 3.630354675645853e-06, | |
| "loss": 0.0722, | |
| "step": 393 | |
| }, | |
| { | |
| "epoch": 1.1796407185628743, | |
| "grad_norm": 1.3957893502090077, | |
| "learning_rate": 3.6077544387421293e-06, | |
| "loss": 0.0638, | |
| "step": 394 | |
| }, | |
| { | |
| "epoch": 1.1826347305389222, | |
| "grad_norm": 1.1368274192843975, | |
| "learning_rate": 3.5851849955375177e-06, | |
| "loss": 0.0545, | |
| "step": 395 | |
| }, | |
| { | |
| "epoch": 1.18562874251497, | |
| "grad_norm": 1.1397331480671449, | |
| "learning_rate": 3.5626468452231534e-06, | |
| "loss": 0.0502, | |
| "step": 396 | |
| }, | |
| { | |
| "epoch": 1.188622754491018, | |
| "grad_norm": 1.4034619795591885, | |
| "learning_rate": 3.540140486298035e-06, | |
| "loss": 0.0609, | |
| "step": 397 | |
| }, | |
| { | |
| "epoch": 1.1916167664670658, | |
| "grad_norm": 1.3224668155206674, | |
| "learning_rate": 3.517666416557999e-06, | |
| "loss": 0.069, | |
| "step": 398 | |
| }, | |
| { | |
| "epoch": 1.1946107784431137, | |
| "grad_norm": 1.3093552198323877, | |
| "learning_rate": 3.495225133084712e-06, | |
| "loss": 0.0523, | |
| "step": 399 | |
| }, | |
| { | |
| "epoch": 1.1976047904191618, | |
| "grad_norm": 0.9947346641470692, | |
| "learning_rate": 3.472817132234669e-06, | |
| "loss": 0.0478, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 1.1976047904191618, | |
| "eval_loss": 0.19725804030895233, | |
| "eval_runtime": 3.0852, | |
| "eval_samples_per_second": 17.503, | |
| "eval_steps_per_second": 4.538, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 1.2005988023952097, | |
| "grad_norm": 1.3127713173249609, | |
| "learning_rate": 3.4504429096282246e-06, | |
| "loss": 0.0597, | |
| "step": 401 | |
| }, | |
| { | |
| "epoch": 1.2035928143712575, | |
| "grad_norm": 1.35146636986748, | |
| "learning_rate": 3.428102960138625e-06, | |
| "loss": 0.0687, | |
| "step": 402 | |
| }, | |
| { | |
| "epoch": 1.2065868263473054, | |
| "grad_norm": 1.8213309701575409, | |
| "learning_rate": 3.405797777881059e-06, | |
| "loss": 0.0664, | |
| "step": 403 | |
| }, | |
| { | |
| "epoch": 1.2095808383233533, | |
| "grad_norm": 1.1979004155689492, | |
| "learning_rate": 3.3835278562017405e-06, | |
| "loss": 0.0535, | |
| "step": 404 | |
| }, | |
| { | |
| "epoch": 1.2125748502994012, | |
| "grad_norm": 1.121706023631107, | |
| "learning_rate": 3.3612936876669834e-06, | |
| "loss": 0.0546, | |
| "step": 405 | |
| }, | |
| { | |
| "epoch": 1.215568862275449, | |
| "grad_norm": 1.3194886211190295, | |
| "learning_rate": 3.3390957640523147e-06, | |
| "loss": 0.0689, | |
| "step": 406 | |
| }, | |
| { | |
| "epoch": 1.218562874251497, | |
| "grad_norm": 1.2215382094949854, | |
| "learning_rate": 3.3169345763315986e-06, | |
| "loss": 0.0552, | |
| "step": 407 | |
| }, | |
| { | |
| "epoch": 1.221556886227545, | |
| "grad_norm": 1.21446429545779, | |
| "learning_rate": 3.29481061466617e-06, | |
| "loss": 0.0648, | |
| "step": 408 | |
| }, | |
| { | |
| "epoch": 1.2245508982035929, | |
| "grad_norm": 1.3469944746373024, | |
| "learning_rate": 3.2727243683940045e-06, | |
| "loss": 0.0671, | |
| "step": 409 | |
| }, | |
| { | |
| "epoch": 1.2275449101796407, | |
| "grad_norm": 1.2763685894045103, | |
| "learning_rate": 3.2506763260188824e-06, | |
| "loss": 0.0541, | |
| "step": 410 | |
| }, | |
| { | |
| "epoch": 1.2305389221556886, | |
| "grad_norm": 1.119338332619205, | |
| "learning_rate": 3.2286669751995905e-06, | |
| "loss": 0.0524, | |
| "step": 411 | |
| }, | |
| { | |
| "epoch": 1.2335329341317365, | |
| "grad_norm": 1.2356509069973838, | |
| "learning_rate": 3.2066968027391377e-06, | |
| "loss": 0.0517, | |
| "step": 412 | |
| }, | |
| { | |
| "epoch": 1.2365269461077844, | |
| "grad_norm": 1.11323113448516, | |
| "learning_rate": 3.1847662945739833e-06, | |
| "loss": 0.0528, | |
| "step": 413 | |
| }, | |
| { | |
| "epoch": 1.2395209580838324, | |
| "grad_norm": 1.1250857116890867, | |
| "learning_rate": 3.1628759357632943e-06, | |
| "loss": 0.0516, | |
| "step": 414 | |
| }, | |
| { | |
| "epoch": 1.2425149700598803, | |
| "grad_norm": 1.0904272383660454, | |
| "learning_rate": 3.1410262104782086e-06, | |
| "loss": 0.0456, | |
| "step": 415 | |
| }, | |
| { | |
| "epoch": 1.2455089820359282, | |
| "grad_norm": 1.306363181321619, | |
| "learning_rate": 3.119217601991139e-06, | |
| "loss": 0.0549, | |
| "step": 416 | |
| }, | |
| { | |
| "epoch": 1.248502994011976, | |
| "grad_norm": 1.3046323547783265, | |
| "learning_rate": 3.0974505926650724e-06, | |
| "loss": 0.0606, | |
| "step": 417 | |
| }, | |
| { | |
| "epoch": 1.251497005988024, | |
| "grad_norm": 1.6030137697934492, | |
| "learning_rate": 3.0757256639429027e-06, | |
| "loss": 0.0678, | |
| "step": 418 | |
| }, | |
| { | |
| "epoch": 1.2544910179640718, | |
| "grad_norm": 1.1506637455607638, | |
| "learning_rate": 3.0540432963367907e-06, | |
| "loss": 0.0567, | |
| "step": 419 | |
| }, | |
| { | |
| "epoch": 1.2574850299401197, | |
| "grad_norm": 1.0478237148802825, | |
| "learning_rate": 3.032403969417523e-06, | |
| "loss": 0.046, | |
| "step": 420 | |
| }, | |
| { | |
| "epoch": 1.2604790419161676, | |
| "grad_norm": 1.20385913996633, | |
| "learning_rate": 3.010808161803917e-06, | |
| "loss": 0.0571, | |
| "step": 421 | |
| }, | |
| { | |
| "epoch": 1.2634730538922156, | |
| "grad_norm": 1.3543029874098547, | |
| "learning_rate": 2.9892563511522305e-06, | |
| "loss": 0.0608, | |
| "step": 422 | |
| }, | |
| { | |
| "epoch": 1.2664670658682635, | |
| "grad_norm": 1.2814110927742595, | |
| "learning_rate": 2.9677490141455915e-06, | |
| "loss": 0.0606, | |
| "step": 423 | |
| }, | |
| { | |
| "epoch": 1.2694610778443114, | |
| "grad_norm": 1.264673331108618, | |
| "learning_rate": 2.946286626483463e-06, | |
| "loss": 0.0619, | |
| "step": 424 | |
| }, | |
| { | |
| "epoch": 1.2724550898203593, | |
| "grad_norm": 1.363436430444569, | |
| "learning_rate": 2.924869662871117e-06, | |
| "loss": 0.0642, | |
| "step": 425 | |
| }, | |
| { | |
| "epoch": 1.2754491017964071, | |
| "grad_norm": 1.2717274590602097, | |
| "learning_rate": 2.903498597009136e-06, | |
| "loss": 0.0664, | |
| "step": 426 | |
| }, | |
| { | |
| "epoch": 1.278443113772455, | |
| "grad_norm": 1.361259025755947, | |
| "learning_rate": 2.8821739015829338e-06, | |
| "loss": 0.0577, | |
| "step": 427 | |
| }, | |
| { | |
| "epoch": 1.281437125748503, | |
| "grad_norm": 1.0344658869629917, | |
| "learning_rate": 2.8608960482523058e-06, | |
| "loss": 0.0424, | |
| "step": 428 | |
| }, | |
| { | |
| "epoch": 1.284431137724551, | |
| "grad_norm": 1.2472116027549918, | |
| "learning_rate": 2.839665507640992e-06, | |
| "loss": 0.0502, | |
| "step": 429 | |
| }, | |
| { | |
| "epoch": 1.2874251497005988, | |
| "grad_norm": 1.1997772776331364, | |
| "learning_rate": 2.818482749326272e-06, | |
| "loss": 0.0554, | |
| "step": 430 | |
| }, | |
| { | |
| "epoch": 1.2904191616766467, | |
| "grad_norm": 1.2369387064395052, | |
| "learning_rate": 2.797348241828569e-06, | |
| "loss": 0.0527, | |
| "step": 431 | |
| }, | |
| { | |
| "epoch": 1.2934131736526946, | |
| "grad_norm": 1.1724057275819522, | |
| "learning_rate": 2.776262452601104e-06, | |
| "loss": 0.0519, | |
| "step": 432 | |
| }, | |
| { | |
| "epoch": 1.2964071856287425, | |
| "grad_norm": 1.389683272027915, | |
| "learning_rate": 2.7552258480195348e-06, | |
| "loss": 0.0644, | |
| "step": 433 | |
| }, | |
| { | |
| "epoch": 1.2994011976047903, | |
| "grad_norm": 1.2213077838683633, | |
| "learning_rate": 2.734238893371667e-06, | |
| "loss": 0.0537, | |
| "step": 434 | |
| }, | |
| { | |
| "epoch": 1.3023952095808382, | |
| "grad_norm": 1.3620234317629327, | |
| "learning_rate": 2.7133020528471322e-06, | |
| "loss": 0.0574, | |
| "step": 435 | |
| }, | |
| { | |
| "epoch": 1.3053892215568863, | |
| "grad_norm": 1.175530493975421, | |
| "learning_rate": 2.6924157895271563e-06, | |
| "loss": 0.0564, | |
| "step": 436 | |
| }, | |
| { | |
| "epoch": 1.3083832335329342, | |
| "grad_norm": 1.3140478736969217, | |
| "learning_rate": 2.671580565374282e-06, | |
| "loss": 0.0604, | |
| "step": 437 | |
| }, | |
| { | |
| "epoch": 1.311377245508982, | |
| "grad_norm": 1.4197028024853746, | |
| "learning_rate": 2.6507968412221763e-06, | |
| "loss": 0.0536, | |
| "step": 438 | |
| }, | |
| { | |
| "epoch": 1.31437125748503, | |
| "grad_norm": 1.210254658873082, | |
| "learning_rate": 2.6300650767654234e-06, | |
| "loss": 0.0562, | |
| "step": 439 | |
| }, | |
| { | |
| "epoch": 1.3173652694610778, | |
| "grad_norm": 1.2905219317427121, | |
| "learning_rate": 2.6093857305493666e-06, | |
| "loss": 0.0625, | |
| "step": 440 | |
| }, | |
| { | |
| "epoch": 1.3203592814371259, | |
| "grad_norm": 1.185904574436851, | |
| "learning_rate": 2.588759259959962e-06, | |
| "loss": 0.0551, | |
| "step": 441 | |
| }, | |
| { | |
| "epoch": 1.3233532934131738, | |
| "grad_norm": 1.2211827834219973, | |
| "learning_rate": 2.568186121213658e-06, | |
| "loss": 0.057, | |
| "step": 442 | |
| }, | |
| { | |
| "epoch": 1.3263473053892216, | |
| "grad_norm": 1.307695215380919, | |
| "learning_rate": 2.547666769347312e-06, | |
| "loss": 0.0629, | |
| "step": 443 | |
| }, | |
| { | |
| "epoch": 1.3293413173652695, | |
| "grad_norm": 1.4478310063238777, | |
| "learning_rate": 2.5272016582081236e-06, | |
| "loss": 0.0642, | |
| "step": 444 | |
| }, | |
| { | |
| "epoch": 1.3323353293413174, | |
| "grad_norm": 1.283283255945992, | |
| "learning_rate": 2.5067912404435952e-06, | |
| "loss": 0.0568, | |
| "step": 445 | |
| }, | |
| { | |
| "epoch": 1.3353293413173652, | |
| "grad_norm": 1.4928300964721626, | |
| "learning_rate": 2.486435967491516e-06, | |
| "loss": 0.069, | |
| "step": 446 | |
| }, | |
| { | |
| "epoch": 1.3383233532934131, | |
| "grad_norm": 1.3186943881647646, | |
| "learning_rate": 2.4661362895699903e-06, | |
| "loss": 0.0598, | |
| "step": 447 | |
| }, | |
| { | |
| "epoch": 1.341317365269461, | |
| "grad_norm": 1.4036443782164274, | |
| "learning_rate": 2.445892655667462e-06, | |
| "loss": 0.0689, | |
| "step": 448 | |
| }, | |
| { | |
| "epoch": 1.3443113772455089, | |
| "grad_norm": 1.4779086712077336, | |
| "learning_rate": 2.425705513532798e-06, | |
| "loss": 0.0687, | |
| "step": 449 | |
| }, | |
| { | |
| "epoch": 1.347305389221557, | |
| "grad_norm": 1.4850835797869666, | |
| "learning_rate": 2.4055753096653795e-06, | |
| "loss": 0.0676, | |
| "step": 450 | |
| }, | |
| { | |
| "epoch": 1.3502994011976048, | |
| "grad_norm": 1.6180548095711738, | |
| "learning_rate": 2.3855024893052286e-06, | |
| "loss": 0.0612, | |
| "step": 451 | |
| }, | |
| { | |
| "epoch": 1.3532934131736527, | |
| "grad_norm": 1.1860096993931721, | |
| "learning_rate": 2.365487496423152e-06, | |
| "loss": 0.0568, | |
| "step": 452 | |
| }, | |
| { | |
| "epoch": 1.3562874251497006, | |
| "grad_norm": 1.4058937803410392, | |
| "learning_rate": 2.3455307737109338e-06, | |
| "loss": 0.0568, | |
| "step": 453 | |
| }, | |
| { | |
| "epoch": 1.3592814371257484, | |
| "grad_norm": 1.3586161484422434, | |
| "learning_rate": 2.3256327625715345e-06, | |
| "loss": 0.0613, | |
| "step": 454 | |
| }, | |
| { | |
| "epoch": 1.3622754491017965, | |
| "grad_norm": 1.2502896570057045, | |
| "learning_rate": 2.3057939031093346e-06, | |
| "loss": 0.0516, | |
| "step": 455 | |
| }, | |
| { | |
| "epoch": 1.3652694610778444, | |
| "grad_norm": 1.0171820976890555, | |
| "learning_rate": 2.2860146341203936e-06, | |
| "loss": 0.0461, | |
| "step": 456 | |
| }, | |
| { | |
| "epoch": 1.3682634730538923, | |
| "grad_norm": 1.326599275377114, | |
| "learning_rate": 2.2662953930827546e-06, | |
| "loss": 0.0624, | |
| "step": 457 | |
| }, | |
| { | |
| "epoch": 1.3712574850299402, | |
| "grad_norm": 1.0725060933209747, | |
| "learning_rate": 2.2466366161467528e-06, | |
| "loss": 0.0446, | |
| "step": 458 | |
| }, | |
| { | |
| "epoch": 1.374251497005988, | |
| "grad_norm": 1.496801539390415, | |
| "learning_rate": 2.227038738125385e-06, | |
| "loss": 0.0602, | |
| "step": 459 | |
| }, | |
| { | |
| "epoch": 1.377245508982036, | |
| "grad_norm": 1.3709938548433498, | |
| "learning_rate": 2.207502192484685e-06, | |
| "loss": 0.0574, | |
| "step": 460 | |
| }, | |
| { | |
| "epoch": 1.3802395209580838, | |
| "grad_norm": 1.0492528919038995, | |
| "learning_rate": 2.188027411334131e-06, | |
| "loss": 0.0524, | |
| "step": 461 | |
| }, | |
| { | |
| "epoch": 1.3832335329341316, | |
| "grad_norm": 1.345939683654287, | |
| "learning_rate": 2.1686148254171012e-06, | |
| "loss": 0.0581, | |
| "step": 462 | |
| }, | |
| { | |
| "epoch": 1.3862275449101795, | |
| "grad_norm": 1.2305296236864758, | |
| "learning_rate": 2.1492648641013305e-06, | |
| "loss": 0.0534, | |
| "step": 463 | |
| }, | |
| { | |
| "epoch": 1.3892215568862276, | |
| "grad_norm": 1.005554337078165, | |
| "learning_rate": 2.1299779553694323e-06, | |
| "loss": 0.0474, | |
| "step": 464 | |
| }, | |
| { | |
| "epoch": 1.3922155688622755, | |
| "grad_norm": 1.3191230769121984, | |
| "learning_rate": 2.1107545258094135e-06, | |
| "loss": 0.0572, | |
| "step": 465 | |
| }, | |
| { | |
| "epoch": 1.3952095808383234, | |
| "grad_norm": 1.3205682883141818, | |
| "learning_rate": 2.0915950006052555e-06, | |
| "loss": 0.0505, | |
| "step": 466 | |
| }, | |
| { | |
| "epoch": 1.3982035928143712, | |
| "grad_norm": 1.134367699425857, | |
| "learning_rate": 2.0724998035274947e-06, | |
| "loss": 0.0512, | |
| "step": 467 | |
| }, | |
| { | |
| "epoch": 1.401197604790419, | |
| "grad_norm": 1.0766516451834467, | |
| "learning_rate": 2.053469356923865e-06, | |
| "loss": 0.0421, | |
| "step": 468 | |
| }, | |
| { | |
| "epoch": 1.4041916167664672, | |
| "grad_norm": 1.2362491283742734, | |
| "learning_rate": 2.0345040817099433e-06, | |
| "loss": 0.0443, | |
| "step": 469 | |
| }, | |
| { | |
| "epoch": 1.407185628742515, | |
| "grad_norm": 1.123061854877233, | |
| "learning_rate": 2.0156043973598475e-06, | |
| "loss": 0.0536, | |
| "step": 470 | |
| }, | |
| { | |
| "epoch": 1.410179640718563, | |
| "grad_norm": 1.507561785459226, | |
| "learning_rate": 1.996770721896957e-06, | |
| "loss": 0.0592, | |
| "step": 471 | |
| }, | |
| { | |
| "epoch": 1.4131736526946108, | |
| "grad_norm": 1.0317066417403218, | |
| "learning_rate": 1.9780034718846653e-06, | |
| "loss": 0.0439, | |
| "step": 472 | |
| }, | |
| { | |
| "epoch": 1.4161676646706587, | |
| "grad_norm": 1.4318913991993594, | |
| "learning_rate": 1.9593030624171683e-06, | |
| "loss": 0.0616, | |
| "step": 473 | |
| }, | |
| { | |
| "epoch": 1.4191616766467066, | |
| "grad_norm": 1.2922420343307102, | |
| "learning_rate": 1.9406699071102774e-06, | |
| "loss": 0.0562, | |
| "step": 474 | |
| }, | |
| { | |
| "epoch": 1.4221556886227544, | |
| "grad_norm": 1.2026096334409924, | |
| "learning_rate": 1.9221044180922833e-06, | |
| "loss": 0.0415, | |
| "step": 475 | |
| }, | |
| { | |
| "epoch": 1.4251497005988023, | |
| "grad_norm": 1.3234814614678005, | |
| "learning_rate": 1.9036070059948253e-06, | |
| "loss": 0.0648, | |
| "step": 476 | |
| }, | |
| { | |
| "epoch": 1.4281437125748502, | |
| "grad_norm": 1.5255828023548335, | |
| "learning_rate": 1.885178079943823e-06, | |
| "loss": 0.0629, | |
| "step": 477 | |
| }, | |
| { | |
| "epoch": 1.4311377245508983, | |
| "grad_norm": 1.3206269512884048, | |
| "learning_rate": 1.866818047550419e-06, | |
| "loss": 0.0645, | |
| "step": 478 | |
| }, | |
| { | |
| "epoch": 1.4341317365269461, | |
| "grad_norm": 1.163786031237905, | |
| "learning_rate": 1.8485273149019655e-06, | |
| "loss": 0.0512, | |
| "step": 479 | |
| }, | |
| { | |
| "epoch": 1.437125748502994, | |
| "grad_norm": 1.487069271296729, | |
| "learning_rate": 1.8303062865530407e-06, | |
| "loss": 0.0566, | |
| "step": 480 | |
| }, | |
| { | |
| "epoch": 1.4401197604790419, | |
| "grad_norm": 1.1271645095078364, | |
| "learning_rate": 1.8121553655165058e-06, | |
| "loss": 0.0438, | |
| "step": 481 | |
| }, | |
| { | |
| "epoch": 1.4431137724550898, | |
| "grad_norm": 1.3122519055420687, | |
| "learning_rate": 1.7940749532545832e-06, | |
| "loss": 0.0539, | |
| "step": 482 | |
| }, | |
| { | |
| "epoch": 1.4461077844311379, | |
| "grad_norm": 1.3338257171538912, | |
| "learning_rate": 1.7760654496699876e-06, | |
| "loss": 0.0507, | |
| "step": 483 | |
| }, | |
| { | |
| "epoch": 1.4491017964071857, | |
| "grad_norm": 1.1846938587318014, | |
| "learning_rate": 1.7581272530970666e-06, | |
| "loss": 0.0516, | |
| "step": 484 | |
| }, | |
| { | |
| "epoch": 1.4520958083832336, | |
| "grad_norm": 1.1247465686333238, | |
| "learning_rate": 1.7402607602930106e-06, | |
| "loss": 0.0537, | |
| "step": 485 | |
| }, | |
| { | |
| "epoch": 1.4550898203592815, | |
| "grad_norm": 1.3555926381049734, | |
| "learning_rate": 1.7224663664290537e-06, | |
| "loss": 0.0533, | |
| "step": 486 | |
| }, | |
| { | |
| "epoch": 1.4580838323353293, | |
| "grad_norm": 1.2921975993965538, | |
| "learning_rate": 1.7047444650817518e-06, | |
| "loss": 0.0579, | |
| "step": 487 | |
| }, | |
| { | |
| "epoch": 1.4610778443113772, | |
| "grad_norm": 1.2498941791673481, | |
| "learning_rate": 1.6870954482242707e-06, | |
| "loss": 0.0472, | |
| "step": 488 | |
| }, | |
| { | |
| "epoch": 1.464071856287425, | |
| "grad_norm": 1.247098942808355, | |
| "learning_rate": 1.669519706217711e-06, | |
| "loss": 0.0451, | |
| "step": 489 | |
| }, | |
| { | |
| "epoch": 1.467065868263473, | |
| "grad_norm": 1.7164191908811746, | |
| "learning_rate": 1.652017627802487e-06, | |
| "loss": 0.0753, | |
| "step": 490 | |
| }, | |
| { | |
| "epoch": 1.4700598802395208, | |
| "grad_norm": 1.218666563421147, | |
| "learning_rate": 1.6345896000897122e-06, | |
| "loss": 0.0551, | |
| "step": 491 | |
| }, | |
| { | |
| "epoch": 1.473053892215569, | |
| "grad_norm": 1.4276265638056516, | |
| "learning_rate": 1.6172360085526567e-06, | |
| "loss": 0.0647, | |
| "step": 492 | |
| }, | |
| { | |
| "epoch": 1.4760479041916168, | |
| "grad_norm": 1.4209272762272394, | |
| "learning_rate": 1.5999572370182016e-06, | |
| "loss": 0.0609, | |
| "step": 493 | |
| }, | |
| { | |
| "epoch": 1.4790419161676647, | |
| "grad_norm": 1.4094437003558362, | |
| "learning_rate": 1.5827536676583643e-06, | |
| "loss": 0.0585, | |
| "step": 494 | |
| }, | |
| { | |
| "epoch": 1.4820359281437125, | |
| "grad_norm": 1.2363935139172104, | |
| "learning_rate": 1.5656256809818343e-06, | |
| "loss": 0.056, | |
| "step": 495 | |
| }, | |
| { | |
| "epoch": 1.4850299401197604, | |
| "grad_norm": 1.3026215647679429, | |
| "learning_rate": 1.54857365582557e-06, | |
| "loss": 0.0516, | |
| "step": 496 | |
| }, | |
| { | |
| "epoch": 1.4880239520958085, | |
| "grad_norm": 1.4527075445163016, | |
| "learning_rate": 1.5315979693464039e-06, | |
| "loss": 0.0622, | |
| "step": 497 | |
| }, | |
| { | |
| "epoch": 1.4910179640718564, | |
| "grad_norm": 1.2115883641312897, | |
| "learning_rate": 1.5146989970127158e-06, | |
| "loss": 0.0504, | |
| "step": 498 | |
| }, | |
| { | |
| "epoch": 1.4940119760479043, | |
| "grad_norm": 1.5616018638637084, | |
| "learning_rate": 1.4978771125961177e-06, | |
| "loss": 0.0596, | |
| "step": 499 | |
| }, | |
| { | |
| "epoch": 1.4970059880239521, | |
| "grad_norm": 1.317862456105658, | |
| "learning_rate": 1.4811326881631937e-06, | |
| "loss": 0.0632, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 1.5, | |
| "grad_norm": 1.2392863601669997, | |
| "learning_rate": 1.4644660940672628e-06, | |
| "loss": 0.0578, | |
| "step": 501 | |
| }, | |
| { | |
| "epoch": 1.5029940119760479, | |
| "grad_norm": 1.4145105383691612, | |
| "learning_rate": 1.4478776989401949e-06, | |
| "loss": 0.0536, | |
| "step": 502 | |
| }, | |
| { | |
| "epoch": 1.5059880239520957, | |
| "grad_norm": 1.4016645666063061, | |
| "learning_rate": 1.4313678696842559e-06, | |
| "loss": 0.0577, | |
| "step": 503 | |
| }, | |
| { | |
| "epoch": 1.5089820359281436, | |
| "grad_norm": 1.1995195266450838, | |
| "learning_rate": 1.4149369714639856e-06, | |
| "loss": 0.0509, | |
| "step": 504 | |
| }, | |
| { | |
| "epoch": 1.5119760479041915, | |
| "grad_norm": 1.464268462485674, | |
| "learning_rate": 1.3985853676981316e-06, | |
| "loss": 0.0575, | |
| "step": 505 | |
| }, | |
| { | |
| "epoch": 1.5149700598802394, | |
| "grad_norm": 1.2214701606979053, | |
| "learning_rate": 1.3823134200516043e-06, | |
| "loss": 0.0462, | |
| "step": 506 | |
| }, | |
| { | |
| "epoch": 1.5179640718562875, | |
| "grad_norm": 1.2808101885210073, | |
| "learning_rate": 1.366121488427481e-06, | |
| "loss": 0.0563, | |
| "step": 507 | |
| }, | |
| { | |
| "epoch": 1.5209580838323353, | |
| "grad_norm": 1.2261042451998634, | |
| "learning_rate": 1.3500099309590397e-06, | |
| "loss": 0.0501, | |
| "step": 508 | |
| }, | |
| { | |
| "epoch": 1.5239520958083832, | |
| "grad_norm": 1.2702205057589262, | |
| "learning_rate": 1.3339791040018479e-06, | |
| "loss": 0.0521, | |
| "step": 509 | |
| }, | |
| { | |
| "epoch": 1.5269461077844313, | |
| "grad_norm": 1.673385966219172, | |
| "learning_rate": 1.3180293621258694e-06, | |
| "loss": 0.0681, | |
| "step": 510 | |
| }, | |
| { | |
| "epoch": 1.5299401197604792, | |
| "grad_norm": 1.219627539132779, | |
| "learning_rate": 1.3021610581076316e-06, | |
| "loss": 0.0505, | |
| "step": 511 | |
| }, | |
| { | |
| "epoch": 1.532934131736527, | |
| "grad_norm": 1.377058607408145, | |
| "learning_rate": 1.2863745429224145e-06, | |
| "loss": 0.0519, | |
| "step": 512 | |
| }, | |
| { | |
| "epoch": 1.535928143712575, | |
| "grad_norm": 1.255921805962329, | |
| "learning_rate": 1.270670165736499e-06, | |
| "loss": 0.058, | |
| "step": 513 | |
| }, | |
| { | |
| "epoch": 1.5389221556886228, | |
| "grad_norm": 1.5309669496958762, | |
| "learning_rate": 1.2550482738994284e-06, | |
| "loss": 0.0604, | |
| "step": 514 | |
| }, | |
| { | |
| "epoch": 1.5419161676646707, | |
| "grad_norm": 1.5156776508811527, | |
| "learning_rate": 1.239509212936343e-06, | |
| "loss": 0.0639, | |
| "step": 515 | |
| }, | |
| { | |
| "epoch": 1.5449101796407185, | |
| "grad_norm": 1.2487524398412186, | |
| "learning_rate": 1.22405332654032e-06, | |
| "loss": 0.053, | |
| "step": 516 | |
| }, | |
| { | |
| "epoch": 1.5479041916167664, | |
| "grad_norm": 1.4633093516915203, | |
| "learning_rate": 1.2086809565647877e-06, | |
| "loss": 0.0632, | |
| "step": 517 | |
| }, | |
| { | |
| "epoch": 1.5508982035928143, | |
| "grad_norm": 1.0978262252728104, | |
| "learning_rate": 1.1933924430159571e-06, | |
| "loss": 0.0427, | |
| "step": 518 | |
| }, | |
| { | |
| "epoch": 1.5538922155688621, | |
| "grad_norm": 1.37678778125511, | |
| "learning_rate": 1.1781881240452958e-06, | |
| "loss": 0.0527, | |
| "step": 519 | |
| }, | |
| { | |
| "epoch": 1.55688622754491, | |
| "grad_norm": 1.3955843430589898, | |
| "learning_rate": 1.1630683359420653e-06, | |
| "loss": 0.0556, | |
| "step": 520 | |
| }, | |
| { | |
| "epoch": 1.5598802395209581, | |
| "grad_norm": 1.5107922453997455, | |
| "learning_rate": 1.1480334131258626e-06, | |
| "loss": 0.0527, | |
| "step": 521 | |
| }, | |
| { | |
| "epoch": 1.562874251497006, | |
| "grad_norm": 1.7148771743783326, | |
| "learning_rate": 1.1330836881392405e-06, | |
| "loss": 0.0656, | |
| "step": 522 | |
| }, | |
| { | |
| "epoch": 1.5658682634730539, | |
| "grad_norm": 1.4193585798273451, | |
| "learning_rate": 1.11821949164034e-06, | |
| "loss": 0.0546, | |
| "step": 523 | |
| }, | |
| { | |
| "epoch": 1.568862275449102, | |
| "grad_norm": 1.174699031677837, | |
| "learning_rate": 1.103441152395588e-06, | |
| "loss": 0.0433, | |
| "step": 524 | |
| }, | |
| { | |
| "epoch": 1.5718562874251498, | |
| "grad_norm": 1.2910889224664055, | |
| "learning_rate": 1.088748997272414e-06, | |
| "loss": 0.0567, | |
| "step": 525 | |
| }, | |
| { | |
| "epoch": 1.5748502994011977, | |
| "grad_norm": 1.2172124742320245, | |
| "learning_rate": 1.0741433512320316e-06, | |
| "loss": 0.0464, | |
| "step": 526 | |
| }, | |
| { | |
| "epoch": 1.5778443113772456, | |
| "grad_norm": 1.2506263998882494, | |
| "learning_rate": 1.0596245373222424e-06, | |
| "loss": 0.0517, | |
| "step": 527 | |
| }, | |
| { | |
| "epoch": 1.5808383233532934, | |
| "grad_norm": 1.3267820314815264, | |
| "learning_rate": 1.045192876670298e-06, | |
| "loss": 0.0555, | |
| "step": 528 | |
| }, | |
| { | |
| "epoch": 1.5838323353293413, | |
| "grad_norm": 1.294348629485374, | |
| "learning_rate": 1.0308486884757868e-06, | |
| "loss": 0.056, | |
| "step": 529 | |
| }, | |
| { | |
| "epoch": 1.5868263473053892, | |
| "grad_norm": 1.1906363661686121, | |
| "learning_rate": 1.0165922900035886e-06, | |
| "loss": 0.0512, | |
| "step": 530 | |
| }, | |
| { | |
| "epoch": 1.589820359281437, | |
| "grad_norm": 1.0822066111398383, | |
| "learning_rate": 1.0024239965768417e-06, | |
| "loss": 0.0513, | |
| "step": 531 | |
| }, | |
| { | |
| "epoch": 1.592814371257485, | |
| "grad_norm": 1.1269747972718962, | |
| "learning_rate": 9.883441215699824e-07, | |
| "loss": 0.0401, | |
| "step": 532 | |
| }, | |
| { | |
| "epoch": 1.5958083832335328, | |
| "grad_norm": 1.4234004697644156, | |
| "learning_rate": 9.74352976401805e-07, | |
| "loss": 0.0534, | |
| "step": 533 | |
| }, | |
| { | |
| "epoch": 1.5988023952095807, | |
| "grad_norm": 1.3801644032396991, | |
| "learning_rate": 9.604508705285765e-07, | |
| "loss": 0.0604, | |
| "step": 534 | |
| }, | |
| { | |
| "epoch": 1.6017964071856288, | |
| "grad_norm": 1.1030316014646515, | |
| "learning_rate": 9.466381114371942e-07, | |
| "loss": 0.0587, | |
| "step": 535 | |
| }, | |
| { | |
| "epoch": 1.6047904191616766, | |
| "grad_norm": 1.360383288045254, | |
| "learning_rate": 9.329150046383773e-07, | |
| "loss": 0.0594, | |
| "step": 536 | |
| }, | |
| { | |
| "epoch": 1.6077844311377245, | |
| "grad_norm": 1.2999706149204022, | |
| "learning_rate": 9.192818536599213e-07, | |
| "loss": 0.0555, | |
| "step": 537 | |
| }, | |
| { | |
| "epoch": 1.6107784431137726, | |
| "grad_norm": 1.1798670038724433, | |
| "learning_rate": 9.057389600399719e-07, | |
| "loss": 0.0522, | |
| "step": 538 | |
| }, | |
| { | |
| "epoch": 1.6137724550898205, | |
| "grad_norm": 1.2410714341604132, | |
| "learning_rate": 8.922866233203681e-07, | |
| "loss": 0.0583, | |
| "step": 539 | |
| }, | |
| { | |
| "epoch": 1.6167664670658684, | |
| "grad_norm": 1.0362965442228032, | |
| "learning_rate": 8.789251410400024e-07, | |
| "loss": 0.0474, | |
| "step": 540 | |
| }, | |
| { | |
| "epoch": 1.6197604790419162, | |
| "grad_norm": 1.1280595061951908, | |
| "learning_rate": 8.65654808728259e-07, | |
| "loss": 0.0496, | |
| "step": 541 | |
| }, | |
| { | |
| "epoch": 1.622754491017964, | |
| "grad_norm": 1.357229552265298, | |
| "learning_rate": 8.524759198984567e-07, | |
| "loss": 0.0535, | |
| "step": 542 | |
| }, | |
| { | |
| "epoch": 1.625748502994012, | |
| "grad_norm": 1.4152927255061434, | |
| "learning_rate": 8.393887660413719e-07, | |
| "loss": 0.0578, | |
| "step": 543 | |
| }, | |
| { | |
| "epoch": 1.6287425149700598, | |
| "grad_norm": 1.6638054591176314, | |
| "learning_rate": 8.263936366187825e-07, | |
| "loss": 0.0802, | |
| "step": 544 | |
| }, | |
| { | |
| "epoch": 1.6317365269461077, | |
| "grad_norm": 1.3034568380102376, | |
| "learning_rate": 8.134908190570723e-07, | |
| "loss": 0.0641, | |
| "step": 545 | |
| }, | |
| { | |
| "epoch": 1.6347305389221556, | |
| "grad_norm": 1.3446841301129344, | |
| "learning_rate": 8.006805987408705e-07, | |
| "loss": 0.0566, | |
| "step": 546 | |
| }, | |
| { | |
| "epoch": 1.6377245508982035, | |
| "grad_norm": 1.1004900454733844, | |
| "learning_rate": 7.879632590067354e-07, | |
| "loss": 0.0459, | |
| "step": 547 | |
| }, | |
| { | |
| "epoch": 1.6407185628742516, | |
| "grad_norm": 1.0927752441627476, | |
| "learning_rate": 7.753390811368972e-07, | |
| "loss": 0.0455, | |
| "step": 548 | |
| }, | |
| { | |
| "epoch": 1.6437125748502994, | |
| "grad_norm": 1.356000945321596, | |
| "learning_rate": 7.628083443530287e-07, | |
| "loss": 0.0551, | |
| "step": 549 | |
| }, | |
| { | |
| "epoch": 1.6467065868263473, | |
| "grad_norm": 1.45300344454185, | |
| "learning_rate": 7.503713258100726e-07, | |
| "loss": 0.0688, | |
| "step": 550 | |
| }, | |
| { | |
| "epoch": 1.6497005988023952, | |
| "grad_norm": 1.3919709416464512, | |
| "learning_rate": 7.380283005901084e-07, | |
| "loss": 0.0579, | |
| "step": 551 | |
| }, | |
| { | |
| "epoch": 1.6526946107784433, | |
| "grad_norm": 1.100190857152072, | |
| "learning_rate": 7.257795416962754e-07, | |
| "loss": 0.044, | |
| "step": 552 | |
| }, | |
| { | |
| "epoch": 1.6556886227544911, | |
| "grad_norm": 1.0466576577865268, | |
| "learning_rate": 7.136253200467231e-07, | |
| "loss": 0.0351, | |
| "step": 553 | |
| }, | |
| { | |
| "epoch": 1.658682634730539, | |
| "grad_norm": 1.160479318589764, | |
| "learning_rate": 7.015659044686307e-07, | |
| "loss": 0.0464, | |
| "step": 554 | |
| }, | |
| { | |
| "epoch": 1.6616766467065869, | |
| "grad_norm": 1.3712720541911383, | |
| "learning_rate": 6.896015616922535e-07, | |
| "loss": 0.0517, | |
| "step": 555 | |
| }, | |
| { | |
| "epoch": 1.6646706586826348, | |
| "grad_norm": 1.354548232375003, | |
| "learning_rate": 6.777325563450282e-07, | |
| "loss": 0.0583, | |
| "step": 556 | |
| }, | |
| { | |
| "epoch": 1.6676646706586826, | |
| "grad_norm": 1.3828462971441577, | |
| "learning_rate": 6.659591509457125e-07, | |
| "loss": 0.0645, | |
| "step": 557 | |
| }, | |
| { | |
| "epoch": 1.6706586826347305, | |
| "grad_norm": 1.376575619616543, | |
| "learning_rate": 6.542816058985896e-07, | |
| "loss": 0.0499, | |
| "step": 558 | |
| }, | |
| { | |
| "epoch": 1.6736526946107784, | |
| "grad_norm": 1.3420389438313671, | |
| "learning_rate": 6.427001794876974e-07, | |
| "loss": 0.0629, | |
| "step": 559 | |
| }, | |
| { | |
| "epoch": 1.6766467065868262, | |
| "grad_norm": 1.107828026244648, | |
| "learning_rate": 6.312151278711237e-07, | |
| "loss": 0.0406, | |
| "step": 560 | |
| }, | |
| { | |
| "epoch": 1.6796407185628741, | |
| "grad_norm": 1.1760708065953727, | |
| "learning_rate": 6.198267050753387e-07, | |
| "loss": 0.0478, | |
| "step": 561 | |
| }, | |
| { | |
| "epoch": 1.6826347305389222, | |
| "grad_norm": 1.286224946314711, | |
| "learning_rate": 6.085351629895736e-07, | |
| "loss": 0.0547, | |
| "step": 562 | |
| }, | |
| { | |
| "epoch": 1.68562874251497, | |
| "grad_norm": 1.2975238756348946, | |
| "learning_rate": 5.973407513602514e-07, | |
| "loss": 0.051, | |
| "step": 563 | |
| }, | |
| { | |
| "epoch": 1.688622754491018, | |
| "grad_norm": 1.39851125490302, | |
| "learning_rate": 5.862437177854629e-07, | |
| "loss": 0.0616, | |
| "step": 564 | |
| }, | |
| { | |
| "epoch": 1.6916167664670658, | |
| "grad_norm": 1.363222261496164, | |
| "learning_rate": 5.752443077094927e-07, | |
| "loss": 0.0515, | |
| "step": 565 | |
| }, | |
| { | |
| "epoch": 1.694610778443114, | |
| "grad_norm": 1.1809116093986705, | |
| "learning_rate": 5.643427644173838e-07, | |
| "loss": 0.0477, | |
| "step": 566 | |
| }, | |
| { | |
| "epoch": 1.6976047904191618, | |
| "grad_norm": 1.2814464845304079, | |
| "learning_rate": 5.535393290295643e-07, | |
| "loss": 0.0527, | |
| "step": 567 | |
| }, | |
| { | |
| "epoch": 1.7005988023952097, | |
| "grad_norm": 1.137761688892092, | |
| "learning_rate": 5.428342404965076e-07, | |
| "loss": 0.0492, | |
| "step": 568 | |
| }, | |
| { | |
| "epoch": 1.7035928143712575, | |
| "grad_norm": 1.2024352568359806, | |
| "learning_rate": 5.322277355934557e-07, | |
| "loss": 0.0542, | |
| "step": 569 | |
| }, | |
| { | |
| "epoch": 1.7065868263473054, | |
| "grad_norm": 1.0270097905556121, | |
| "learning_rate": 5.217200489151714e-07, | |
| "loss": 0.0413, | |
| "step": 570 | |
| }, | |
| { | |
| "epoch": 1.7095808383233533, | |
| "grad_norm": 1.1814327986956994, | |
| "learning_rate": 5.113114128707592e-07, | |
| "loss": 0.0497, | |
| "step": 571 | |
| }, | |
| { | |
| "epoch": 1.7125748502994012, | |
| "grad_norm": 1.371110628786311, | |
| "learning_rate": 5.010020576785174e-07, | |
| "loss": 0.0564, | |
| "step": 572 | |
| }, | |
| { | |
| "epoch": 1.715568862275449, | |
| "grad_norm": 1.7228537686756142, | |
| "learning_rate": 4.907922113608532e-07, | |
| "loss": 0.0577, | |
| "step": 573 | |
| }, | |
| { | |
| "epoch": 1.718562874251497, | |
| "grad_norm": 1.3293936818082837, | |
| "learning_rate": 4.806820997392325e-07, | |
| "loss": 0.0605, | |
| "step": 574 | |
| }, | |
| { | |
| "epoch": 1.7215568862275448, | |
| "grad_norm": 1.0593721943735672, | |
| "learning_rate": 4.7067194642919036e-07, | |
| "loss": 0.0456, | |
| "step": 575 | |
| }, | |
| { | |
| "epoch": 1.7245508982035929, | |
| "grad_norm": 1.3209316998108327, | |
| "learning_rate": 4.607619728353818e-07, | |
| "loss": 0.0459, | |
| "step": 576 | |
| }, | |
| { | |
| "epoch": 1.7275449101796407, | |
| "grad_norm": 1.506825767696583, | |
| "learning_rate": 4.50952398146689e-07, | |
| "loss": 0.0541, | |
| "step": 577 | |
| }, | |
| { | |
| "epoch": 1.7305389221556886, | |
| "grad_norm": 1.6644704026704347, | |
| "learning_rate": 4.4124343933136525e-07, | |
| "loss": 0.0502, | |
| "step": 578 | |
| }, | |
| { | |
| "epoch": 1.7335329341317365, | |
| "grad_norm": 1.2975129204276974, | |
| "learning_rate": 4.3163531113224466e-07, | |
| "loss": 0.0564, | |
| "step": 579 | |
| }, | |
| { | |
| "epoch": 1.7365269461077846, | |
| "grad_norm": 1.6279140988962861, | |
| "learning_rate": 4.221282260619891e-07, | |
| "loss": 0.0658, | |
| "step": 580 | |
| }, | |
| { | |
| "epoch": 1.7395209580838324, | |
| "grad_norm": 1.1267198945816772, | |
| "learning_rate": 4.127223943983849e-07, | |
| "loss": 0.0419, | |
| "step": 581 | |
| }, | |
| { | |
| "epoch": 1.7425149700598803, | |
| "grad_norm": 1.3377279651851868, | |
| "learning_rate": 4.03418024179697e-07, | |
| "loss": 0.0492, | |
| "step": 582 | |
| }, | |
| { | |
| "epoch": 1.7455089820359282, | |
| "grad_norm": 1.2526979862343541, | |
| "learning_rate": 3.9421532120006544e-07, | |
| "loss": 0.0542, | |
| "step": 583 | |
| }, | |
| { | |
| "epoch": 1.748502994011976, | |
| "grad_norm": 1.1554289330273324, | |
| "learning_rate": 3.851144890049535e-07, | |
| "loss": 0.0487, | |
| "step": 584 | |
| }, | |
| { | |
| "epoch": 1.751497005988024, | |
| "grad_norm": 1.1496568847583863, | |
| "learning_rate": 3.761157288866418e-07, | |
| "loss": 0.0557, | |
| "step": 585 | |
| }, | |
| { | |
| "epoch": 1.7544910179640718, | |
| "grad_norm": 1.2028648018386747, | |
| "learning_rate": 3.672192398797858e-07, | |
| "loss": 0.046, | |
| "step": 586 | |
| }, | |
| { | |
| "epoch": 1.7574850299401197, | |
| "grad_norm": 1.523329910979175, | |
| "learning_rate": 3.58425218757002e-07, | |
| "loss": 0.0582, | |
| "step": 587 | |
| }, | |
| { | |
| "epoch": 1.7604790419161676, | |
| "grad_norm": 1.313477071043625, | |
| "learning_rate": 3.497338600245254e-07, | |
| "loss": 0.0514, | |
| "step": 588 | |
| }, | |
| { | |
| "epoch": 1.7634730538922154, | |
| "grad_norm": 1.3787630655983611, | |
| "learning_rate": 3.4114535591790233e-07, | |
| "loss": 0.0484, | |
| "step": 589 | |
| }, | |
| { | |
| "epoch": 1.7664670658682635, | |
| "grad_norm": 1.396069349753579, | |
| "learning_rate": 3.326598963977395e-07, | |
| "loss": 0.0466, | |
| "step": 590 | |
| }, | |
| { | |
| "epoch": 1.7694610778443114, | |
| "grad_norm": 1.295467444271738, | |
| "learning_rate": 3.242776691455013e-07, | |
| "loss": 0.0562, | |
| "step": 591 | |
| }, | |
| { | |
| "epoch": 1.7724550898203593, | |
| "grad_norm": 1.1748565325783131, | |
| "learning_rate": 3.159988595593616e-07, | |
| "loss": 0.0469, | |
| "step": 592 | |
| }, | |
| { | |
| "epoch": 1.7754491017964071, | |
| "grad_norm": 1.1493358526240753, | |
| "learning_rate": 3.078236507501015e-07, | |
| "loss": 0.0465, | |
| "step": 593 | |
| }, | |
| { | |
| "epoch": 1.7784431137724552, | |
| "grad_norm": 1.271227077425924, | |
| "learning_rate": 2.9975222353705757e-07, | |
| "loss": 0.0556, | |
| "step": 594 | |
| }, | |
| { | |
| "epoch": 1.781437125748503, | |
| "grad_norm": 1.4784103113421017, | |
| "learning_rate": 2.917847564441256e-07, | |
| "loss": 0.0626, | |
| "step": 595 | |
| }, | |
| { | |
| "epoch": 1.784431137724551, | |
| "grad_norm": 1.2251147311974475, | |
| "learning_rate": 2.839214256958106e-07, | |
| "loss": 0.0585, | |
| "step": 596 | |
| }, | |
| { | |
| "epoch": 1.7874251497005988, | |
| "grad_norm": 1.221355096564607, | |
| "learning_rate": 2.7616240521332884e-07, | |
| "loss": 0.0537, | |
| "step": 597 | |
| }, | |
| { | |
| "epoch": 1.7904191616766467, | |
| "grad_norm": 1.3723576904766543, | |
| "learning_rate": 2.6850786661076047e-07, | |
| "loss": 0.0496, | |
| "step": 598 | |
| }, | |
| { | |
| "epoch": 1.7934131736526946, | |
| "grad_norm": 1.2101900243619497, | |
| "learning_rate": 2.6095797919125533e-07, | |
| "loss": 0.0447, | |
| "step": 599 | |
| }, | |
| { | |
| "epoch": 1.7964071856287425, | |
| "grad_norm": 1.2531955111305382, | |
| "learning_rate": 2.5351290994328703e-07, | |
| "loss": 0.0497, | |
| "step": 600 | |
| }, | |
| { | |
| "epoch": 1.7964071856287425, | |
| "eval_loss": 0.1921154260635376, | |
| "eval_runtime": 3.0862, | |
| "eval_samples_per_second": 17.498, | |
| "eval_steps_per_second": 4.536, | |
| "step": 600 | |
| }, | |
| { | |
| "epoch": 1.7994011976047903, | |
| "grad_norm": 1.1837614701595274, | |
| "learning_rate": 2.4617282353696093e-07, | |
| "loss": 0.0561, | |
| "step": 601 | |
| }, | |
| { | |
| "epoch": 1.8023952095808382, | |
| "grad_norm": 1.4375832269988713, | |
| "learning_rate": 2.3893788232036807e-07, | |
| "loss": 0.0587, | |
| "step": 602 | |
| }, | |
| { | |
| "epoch": 1.805389221556886, | |
| "grad_norm": 1.301694421401321, | |
| "learning_rate": 2.318082463160032e-07, | |
| "loss": 0.0551, | |
| "step": 603 | |
| }, | |
| { | |
| "epoch": 1.8083832335329342, | |
| "grad_norm": 1.3075374303743317, | |
| "learning_rate": 2.2478407321721295e-07, | |
| "loss": 0.0486, | |
| "step": 604 | |
| }, | |
| { | |
| "epoch": 1.811377245508982, | |
| "grad_norm": 1.2946401959782214, | |
| "learning_rate": 2.1786551838471892e-07, | |
| "loss": 0.0621, | |
| "step": 605 | |
| }, | |
| { | |
| "epoch": 1.81437125748503, | |
| "grad_norm": 1.294054809362444, | |
| "learning_rate": 2.1105273484317402e-07, | |
| "loss": 0.0474, | |
| "step": 606 | |
| }, | |
| { | |
| "epoch": 1.8173652694610778, | |
| "grad_norm": 1.1382901130215402, | |
| "learning_rate": 2.043458732777831e-07, | |
| "loss": 0.0465, | |
| "step": 607 | |
| }, | |
| { | |
| "epoch": 1.8203592814371259, | |
| "grad_norm": 1.4864606360898223, | |
| "learning_rate": 1.9774508203096843e-07, | |
| "loss": 0.0568, | |
| "step": 608 | |
| }, | |
| { | |
| "epoch": 1.8233532934131738, | |
| "grad_norm": 1.0058713159814512, | |
| "learning_rate": 1.9125050709908388e-07, | |
| "loss": 0.0403, | |
| "step": 609 | |
| }, | |
| { | |
| "epoch": 1.8263473053892216, | |
| "grad_norm": 1.3107911795112779, | |
| "learning_rate": 1.8486229212919482e-07, | |
| "loss": 0.0509, | |
| "step": 610 | |
| }, | |
| { | |
| "epoch": 1.8293413173652695, | |
| "grad_norm": 1.154991320693083, | |
| "learning_rate": 1.7858057841589281e-07, | |
| "loss": 0.0511, | |
| "step": 611 | |
| }, | |
| { | |
| "epoch": 1.8323353293413174, | |
| "grad_norm": 1.1387139872217795, | |
| "learning_rate": 1.7240550489817652e-07, | |
| "loss": 0.0428, | |
| "step": 612 | |
| }, | |
| { | |
| "epoch": 1.8353293413173652, | |
| "grad_norm": 1.328973244771441, | |
| "learning_rate": 1.66337208156373e-07, | |
| "loss": 0.0497, | |
| "step": 613 | |
| }, | |
| { | |
| "epoch": 1.8383233532934131, | |
| "grad_norm": 1.6658914312264914, | |
| "learning_rate": 1.6037582240912175e-07, | |
| "loss": 0.0667, | |
| "step": 614 | |
| }, | |
| { | |
| "epoch": 1.841317365269461, | |
| "grad_norm": 1.538256005466908, | |
| "learning_rate": 1.5452147951040165e-07, | |
| "loss": 0.0677, | |
| "step": 615 | |
| }, | |
| { | |
| "epoch": 1.8443113772455089, | |
| "grad_norm": 1.3011915269675407, | |
| "learning_rate": 1.4877430894662037e-07, | |
| "loss": 0.0568, | |
| "step": 616 | |
| }, | |
| { | |
| "epoch": 1.8473053892215567, | |
| "grad_norm": 1.1220703472107825, | |
| "learning_rate": 1.4313443783374405e-07, | |
| "loss": 0.0414, | |
| "step": 617 | |
| }, | |
| { | |
| "epoch": 1.8502994011976048, | |
| "grad_norm": 1.1135262526310337, | |
| "learning_rate": 1.3760199091449045e-07, | |
| "loss": 0.0416, | |
| "step": 618 | |
| }, | |
| { | |
| "epoch": 1.8532934131736527, | |
| "grad_norm": 1.1520042452784147, | |
| "learning_rate": 1.3217709055556638e-07, | |
| "loss": 0.0473, | |
| "step": 619 | |
| }, | |
| { | |
| "epoch": 1.8562874251497006, | |
| "grad_norm": 1.3455058956268287, | |
| "learning_rate": 1.268598567449647e-07, | |
| "loss": 0.0543, | |
| "step": 620 | |
| }, | |
| { | |
| "epoch": 1.8592814371257484, | |
| "grad_norm": 1.236896416838321, | |
| "learning_rate": 1.2165040708930763e-07, | |
| "loss": 0.0545, | |
| "step": 621 | |
| }, | |
| { | |
| "epoch": 1.8622754491017965, | |
| "grad_norm": 1.0964413266580448, | |
| "learning_rate": 1.1654885681124661e-07, | |
| "loss": 0.0489, | |
| "step": 622 | |
| }, | |
| { | |
| "epoch": 1.8652694610778444, | |
| "grad_norm": 1.288425704507219, | |
| "learning_rate": 1.1155531874691372e-07, | |
| "loss": 0.0476, | |
| "step": 623 | |
| }, | |
| { | |
| "epoch": 1.8682634730538923, | |
| "grad_norm": 1.1821277286699987, | |
| "learning_rate": 1.0666990334342708e-07, | |
| "loss": 0.0479, | |
| "step": 624 | |
| }, | |
| { | |
| "epoch": 1.8712574850299402, | |
| "grad_norm": 1.259640954988431, | |
| "learning_rate": 1.0189271865644445e-07, | |
| "loss": 0.045, | |
| "step": 625 | |
| }, | |
| { | |
| "epoch": 1.874251497005988, | |
| "grad_norm": 1.539702581116964, | |
| "learning_rate": 9.722387034777847e-08, | |
| "loss": 0.0544, | |
| "step": 626 | |
| }, | |
| { | |
| "epoch": 1.877245508982036, | |
| "grad_norm": 1.1992313605886817, | |
| "learning_rate": 9.266346168305518e-08, | |
| "loss": 0.0578, | |
| "step": 627 | |
| }, | |
| { | |
| "epoch": 1.8802395209580838, | |
| "grad_norm": 1.0859227800973141, | |
| "learning_rate": 8.821159352943142e-08, | |
| "loss": 0.0468, | |
| "step": 628 | |
| }, | |
| { | |
| "epoch": 1.8832335329341316, | |
| "grad_norm": 1.2701213231838364, | |
| "learning_rate": 8.38683643533661e-08, | |
| "loss": 0.0455, | |
| "step": 629 | |
| }, | |
| { | |
| "epoch": 1.8862275449101795, | |
| "grad_norm": 1.0866635758919574, | |
| "learning_rate": 7.963387021843683e-08, | |
| "loss": 0.0365, | |
| "step": 630 | |
| }, | |
| { | |
| "epoch": 1.8892215568862274, | |
| "grad_norm": 1.4263331384221776, | |
| "learning_rate": 7.550820478322285e-08, | |
| "loss": 0.063, | |
| "step": 631 | |
| }, | |
| { | |
| "epoch": 1.8922155688622755, | |
| "grad_norm": 1.4051451596866118, | |
| "learning_rate": 7.149145929922607e-08, | |
| "loss": 0.0683, | |
| "step": 632 | |
| }, | |
| { | |
| "epoch": 1.8952095808383234, | |
| "grad_norm": 1.5148245439497718, | |
| "learning_rate": 6.758372260885714e-08, | |
| "loss": 0.0618, | |
| "step": 633 | |
| }, | |
| { | |
| "epoch": 1.8982035928143712, | |
| "grad_norm": 1.4576622975710207, | |
| "learning_rate": 6.378508114346982e-08, | |
| "loss": 0.063, | |
| "step": 634 | |
| }, | |
| { | |
| "epoch": 1.9011976047904193, | |
| "grad_norm": 1.1822817141445092, | |
| "learning_rate": 6.009561892144744e-08, | |
| "loss": 0.0453, | |
| "step": 635 | |
| }, | |
| { | |
| "epoch": 1.9041916167664672, | |
| "grad_norm": 1.0535232191261101, | |
| "learning_rate": 5.651541754634726e-08, | |
| "loss": 0.0399, | |
| "step": 636 | |
| }, | |
| { | |
| "epoch": 1.907185628742515, | |
| "grad_norm": 1.5845291775860704, | |
| "learning_rate": 5.304455620509297e-08, | |
| "loss": 0.0585, | |
| "step": 637 | |
| }, | |
| { | |
| "epoch": 1.910179640718563, | |
| "grad_norm": 1.3366854486937365, | |
| "learning_rate": 4.968311166622553e-08, | |
| "loss": 0.05, | |
| "step": 638 | |
| }, | |
| { | |
| "epoch": 1.9131736526946108, | |
| "grad_norm": 1.2858746801563086, | |
| "learning_rate": 4.643115827820399e-08, | |
| "loss": 0.0575, | |
| "step": 639 | |
| }, | |
| { | |
| "epoch": 1.9161676646706587, | |
| "grad_norm": 1.4461948782552816, | |
| "learning_rate": 4.328876796776071e-08, | |
| "loss": 0.0502, | |
| "step": 640 | |
| }, | |
| { | |
| "epoch": 1.9191616766467066, | |
| "grad_norm": 1.281164774535854, | |
| "learning_rate": 4.0256010238310936e-08, | |
| "loss": 0.0554, | |
| "step": 641 | |
| }, | |
| { | |
| "epoch": 1.9221556886227544, | |
| "grad_norm": 1.281370310857064, | |
| "learning_rate": 3.733295216841626e-08, | |
| "loss": 0.057, | |
| "step": 642 | |
| }, | |
| { | |
| "epoch": 1.9251497005988023, | |
| "grad_norm": 1.509915281532266, | |
| "learning_rate": 3.451965841029914e-08, | |
| "loss": 0.0557, | |
| "step": 643 | |
| }, | |
| { | |
| "epoch": 1.9281437125748502, | |
| "grad_norm": 1.2080168332772145, | |
| "learning_rate": 3.181619118841517e-08, | |
| "loss": 0.057, | |
| "step": 644 | |
| }, | |
| { | |
| "epoch": 1.931137724550898, | |
| "grad_norm": 1.0917510373047643, | |
| "learning_rate": 2.9222610298074717e-08, | |
| "loss": 0.0383, | |
| "step": 645 | |
| }, | |
| { | |
| "epoch": 1.9341317365269461, | |
| "grad_norm": 1.176031045498904, | |
| "learning_rate": 2.673897310412288e-08, | |
| "loss": 0.0491, | |
| "step": 646 | |
| }, | |
| { | |
| "epoch": 1.937125748502994, | |
| "grad_norm": 1.1442837017006728, | |
| "learning_rate": 2.4365334539667717e-08, | |
| "loss": 0.0458, | |
| "step": 647 | |
| }, | |
| { | |
| "epoch": 1.9401197604790419, | |
| "grad_norm": 1.2752655146869405, | |
| "learning_rate": 2.210174710486679e-08, | |
| "loss": 0.0598, | |
| "step": 648 | |
| }, | |
| { | |
| "epoch": 1.94311377245509, | |
| "grad_norm": 1.121680818437194, | |
| "learning_rate": 1.99482608657664e-08, | |
| "loss": 0.0448, | |
| "step": 649 | |
| }, | |
| { | |
| "epoch": 1.9461077844311379, | |
| "grad_norm": 1.141645206909454, | |
| "learning_rate": 1.7904923453193056e-08, | |
| "loss": 0.0491, | |
| "step": 650 | |
| }, | |
| { | |
| "epoch": 1.9491017964071857, | |
| "grad_norm": 1.2576065473761184, | |
| "learning_rate": 1.5971780061701524e-08, | |
| "loss": 0.0459, | |
| "step": 651 | |
| }, | |
| { | |
| "epoch": 1.9520958083832336, | |
| "grad_norm": 1.1705723583046324, | |
| "learning_rate": 1.4148873448573408e-08, | |
| "loss": 0.0419, | |
| "step": 652 | |
| }, | |
| { | |
| "epoch": 1.9550898203592815, | |
| "grad_norm": 1.4987075589305316, | |
| "learning_rate": 1.2436243932872349e-08, | |
| "loss": 0.0698, | |
| "step": 653 | |
| }, | |
| { | |
| "epoch": 1.9580838323353293, | |
| "grad_norm": 1.1810525968997512, | |
| "learning_rate": 1.0833929394552523e-08, | |
| "loss": 0.0474, | |
| "step": 654 | |
| }, | |
| { | |
| "epoch": 1.9610778443113772, | |
| "grad_norm": 1.422854814423524, | |
| "learning_rate": 9.341965273621522e-09, | |
| "loss": 0.0511, | |
| "step": 655 | |
| }, | |
| { | |
| "epoch": 1.964071856287425, | |
| "grad_norm": 1.3813663852441627, | |
| "learning_rate": 7.96038456935322e-09, | |
| "loss": 0.0572, | |
| "step": 656 | |
| }, | |
| { | |
| "epoch": 1.967065868263473, | |
| "grad_norm": 1.3242663257925578, | |
| "learning_rate": 6.6892178395611125e-09, | |
| "loss": 0.0541, | |
| "step": 657 | |
| }, | |
| { | |
| "epoch": 1.9700598802395208, | |
| "grad_norm": 1.275745084691783, | |
| "learning_rate": 5.528493199922769e-09, | |
| "loss": 0.0576, | |
| "step": 658 | |
| }, | |
| { | |
| "epoch": 1.9730538922155687, | |
| "grad_norm": 1.0876088177862226, | |
| "learning_rate": 4.478236323355312e-09, | |
| "loss": 0.0487, | |
| "step": 659 | |
| }, | |
| { | |
| "epoch": 1.9760479041916168, | |
| "grad_norm": 1.2739274147042596, | |
| "learning_rate": 3.538470439448105e-09, | |
| "loss": 0.0539, | |
| "step": 660 | |
| }, | |
| { | |
| "epoch": 1.9790419161676647, | |
| "grad_norm": 1.306317943424489, | |
| "learning_rate": 2.709216333952602e-09, | |
| "loss": 0.0592, | |
| "step": 661 | |
| }, | |
| { | |
| "epoch": 1.9820359281437125, | |
| "grad_norm": 1.0938067445243638, | |
| "learning_rate": 1.9904923483171632e-09, | |
| "loss": 0.0527, | |
| "step": 662 | |
| }, | |
| { | |
| "epoch": 1.9850299401197606, | |
| "grad_norm": 1.5410805596226655, | |
| "learning_rate": 1.3823143792851545e-09, | |
| "loss": 0.0645, | |
| "step": 663 | |
| }, | |
| { | |
| "epoch": 1.9880239520958085, | |
| "grad_norm": 1.4429819363035135, | |
| "learning_rate": 8.846958785418969e-10, | |
| "loss": 0.0618, | |
| "step": 664 | |
| }, | |
| { | |
| "epoch": 1.9910179640718564, | |
| "grad_norm": 0.9892537056478168, | |
| "learning_rate": 4.97647852417682e-10, | |
| "loss": 0.0398, | |
| "step": 665 | |
| }, | |
| { | |
| "epoch": 1.9940119760479043, | |
| "grad_norm": 1.2004382169543146, | |
| "learning_rate": 2.2117886164407797e-10, | |
| "loss": 0.0499, | |
| "step": 666 | |
| }, | |
| { | |
| "epoch": 1.9970059880239521, | |
| "grad_norm": 1.4293086009137985, | |
| "learning_rate": 5.529502116519148e-11, | |
| "loss": 0.0525, | |
| "step": 667 | |
| }, | |
| { | |
| "epoch": 2.0, | |
| "grad_norm": 0.7678990870914749, | |
| "learning_rate": 0.0, | |
| "loss": 0.0214, | |
| "step": 668 | |
| }, | |
| { | |
| "epoch": 2.0, | |
| "step": 668, | |
| "total_flos": 8355537027072.0, | |
| "train_loss": 0.11741596177599566, | |
| "train_runtime": 835.2163, | |
| "train_samples_per_second": 12.761, | |
| "train_steps_per_second": 0.8 | |
| } | |
| ], | |
| "logging_steps": 1, | |
| "max_steps": 668, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 2, | |
| "save_steps": 2000, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 8355537027072.0, | |
| "train_batch_size": 4, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
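
The state above is the standard Hugging Face Trainer checkpoint metadata: per-step entries under `log_history` carrying `loss`, `learning_rate`, and `grad_norm`, followed by a closing summary entry with aggregates such as `train_loss` and `train_runtime`. As a minimal sketch of how this file can be consumed (the filename `trainer_state.json` is the conventional Trainer output name and is assumed here, not stated in the log itself):

```python
import json

# Load the serialized trainer state (filename assumed; adjust to your path).
with open("trainer_state.json") as f:
    state = json.load(f)

# Per-step entries contain "loss"; the final summary entry instead holds
# aggregate fields like "train_loss", "train_runtime", "total_flos".
step_logs = [e for e in state["log_history"] if "loss" in e and "step" in e]
summary = state["log_history"][-1]

print(f"logged steps : {len(step_logs)}")
print(f"final loss   : {step_logs[-1]['loss']:.4f} (step {step_logs[-1]['step']})")
print(f"mean loss    : {summary.get('train_loss'):.4f}")
print(f"runtime (s)  : {summary.get('train_runtime'):.1f}")
```

Run against this particular state, such a script would report 668 logged steps, a final-step loss of 0.0214, a mean training loss of about 0.1174, and a runtime of roughly 835 seconds, consistent with the summary entry above.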