{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 2386,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0020955574182732607,
      "grad_norm": 0.5915184020996094,
      "learning_rate": 0.0001996647108130763,
      "loss": 1.2488,
      "step": 5
    },
    {
      "epoch": 0.004191114836546521,
      "grad_norm": 0.6416106224060059,
      "learning_rate": 0.00019924559932942164,
      "loss": 0.8074,
      "step": 10
    },
    {
      "epoch": 0.006286672254819782,
      "grad_norm": 0.47403833270072937,
      "learning_rate": 0.000198826487845767,
      "loss": 0.5298,
      "step": 15
    },
    {
      "epoch": 0.008382229673093043,
      "grad_norm": 0.49060583114624023,
      "learning_rate": 0.00019840737636211232,
      "loss": 0.4396,
      "step": 20
    },
    {
      "epoch": 0.010477787091366304,
      "grad_norm": 0.3994845449924469,
      "learning_rate": 0.00019798826487845767,
      "loss": 0.3997,
      "step": 25
    },
    {
      "epoch": 0.012573344509639563,
      "grad_norm": 5.743133068084717,
      "learning_rate": 0.00019756915339480302,
      "loss": 0.3571,
      "step": 30
    },
    {
      "epoch": 0.014668901927912825,
      "grad_norm": 0.4928306043148041,
      "learning_rate": 0.00019715004191114837,
      "loss": 0.3467,
      "step": 35
    },
    {
      "epoch": 0.016764459346186086,
      "grad_norm": 333.243896484375,
      "learning_rate": 0.00019673093042749373,
      "loss": 0.3173,
      "step": 40
    },
    {
      "epoch": 0.018860016764459347,
      "grad_norm": 0.5272846817970276,
      "learning_rate": 0.00019631181894383908,
      "loss": 0.2952,
      "step": 45
    },
    {
      "epoch": 0.020955574182732608,
      "grad_norm": 3.5901286602020264,
      "learning_rate": 0.00019589270746018443,
      "loss": 0.2935,
      "step": 50
    },
    {
      "epoch": 0.02305113160100587,
      "grad_norm": 0.5013518929481506,
      "learning_rate": 0.00019547359597652975,
      "loss": 0.3024,
      "step": 55
    },
    {
      "epoch": 0.025146689019279127,
      "grad_norm": 16.086902618408203,
      "learning_rate": 0.0001950544844928751,
      "loss": 0.2683,
      "step": 60
    },
    {
      "epoch": 0.027242246437552388,
      "grad_norm": 0.30715975165367126,
      "learning_rate": 0.00019463537300922046,
      "loss": 0.2653,
      "step": 65
    },
    {
      "epoch": 0.02933780385582565,
      "grad_norm": 0.33719402551651,
      "learning_rate": 0.0001942162615255658,
      "loss": 0.2575,
      "step": 70
    },
    {
      "epoch": 0.03143336127409891,
      "grad_norm": 0.3756738305091858,
      "learning_rate": 0.00019379715004191116,
      "loss": 0.2449,
      "step": 75
    },
    {
      "epoch": 0.03352891869237217,
      "grad_norm": 0.3635186553001404,
      "learning_rate": 0.0001933780385582565,
      "loss": 0.2538,
      "step": 80
    },
    {
      "epoch": 0.03562447611064543,
      "grad_norm": 0.3560231328010559,
      "learning_rate": 0.00019295892707460186,
      "loss": 0.2435,
      "step": 85
    },
    {
      "epoch": 0.037720033528918694,
      "grad_norm": 0.371216356754303,
      "learning_rate": 0.00019253981559094722,
      "loss": 0.2329,
      "step": 90
    },
    {
      "epoch": 0.039815590947191955,
      "grad_norm": 0.45535510778427124,
      "learning_rate": 0.00019212070410729254,
      "loss": 0.2338,
      "step": 95
    },
    {
      "epoch": 0.041911148365465216,
      "grad_norm": 15.063165664672852,
      "learning_rate": 0.0001917015926236379,
      "loss": 0.2353,
      "step": 100
    },
    {
      "epoch": 0.04400670578373848,
      "grad_norm": 0.308463454246521,
      "learning_rate": 0.00019128248113998324,
      "loss": 0.2323,
      "step": 105
    },
    {
      "epoch": 0.04610226320201174,
      "grad_norm": 0.29910165071487427,
      "learning_rate": 0.0001908633696563286,
      "loss": 0.2222,
      "step": 110
    },
    {
      "epoch": 0.04819782062028499,
      "grad_norm": 0.3254060745239258,
      "learning_rate": 0.00019044425817267395,
      "loss": 0.216,
      "step": 115
    },
    {
      "epoch": 0.050293378038558254,
      "grad_norm": 0.2623255252838135,
      "learning_rate": 0.0001900251466890193,
      "loss": 0.2136,
      "step": 120
    },
    {
      "epoch": 0.052388935456831515,
      "grad_norm": 0.2627584636211395,
      "learning_rate": 0.00018960603520536465,
      "loss": 0.2118,
      "step": 125
    },
    {
      "epoch": 0.054484492875104776,
      "grad_norm": 0.28595617413520813,
      "learning_rate": 0.00018918692372170998,
      "loss": 0.2088,
      "step": 130
    },
    {
      "epoch": 0.05658005029337804,
      "grad_norm": 0.550282895565033,
      "learning_rate": 0.00018876781223805533,
      "loss": 0.2198,
      "step": 135
    },
    {
      "epoch": 0.0586756077116513,
      "grad_norm": 0.296385258436203,
      "learning_rate": 0.00018834870075440068,
      "loss": 0.2219,
      "step": 140
    },
    {
      "epoch": 0.06077116512992456,
      "grad_norm": 0.3291824460029602,
      "learning_rate": 0.00018792958927074603,
      "loss": 0.2059,
      "step": 145
    },
    {
      "epoch": 0.06286672254819782,
      "grad_norm": 0.27933308482170105,
      "learning_rate": 0.00018751047778709136,
      "loss": 0.2001,
      "step": 150
    },
    {
      "epoch": 0.06496227996647108,
      "grad_norm": 0.2848331332206726,
      "learning_rate": 0.00018709136630343674,
      "loss": 0.1999,
      "step": 155
    },
    {
      "epoch": 0.06705783738474434,
      "grad_norm": 0.25534388422966003,
      "learning_rate": 0.0001866722548197821,
      "loss": 0.1977,
      "step": 160
    },
    {
      "epoch": 0.0691533948030176,
      "grad_norm": 2.9550936222076416,
      "learning_rate": 0.0001862531433361274,
      "loss": 0.2004,
      "step": 165
    },
    {
      "epoch": 0.07124895222129086,
      "grad_norm": 0.35537609457969666,
      "learning_rate": 0.00018583403185247276,
      "loss": 0.2131,
      "step": 170
    },
    {
      "epoch": 0.07334450963956413,
      "grad_norm": 47.72193908691406,
      "learning_rate": 0.00018541492036881812,
      "loss": 0.2016,
      "step": 175
    },
    {
      "epoch": 0.07544006705783739,
      "grad_norm": 0.3546775281429291,
      "learning_rate": 0.00018499580888516347,
      "loss": 0.1998,
      "step": 180
    },
    {
      "epoch": 0.07753562447611065,
      "grad_norm": 0.2701320946216583,
      "learning_rate": 0.0001845766974015088,
      "loss": 0.1948,
      "step": 185
    },
    {
      "epoch": 0.07963118189438391,
      "grad_norm": 0.2246120572090149,
      "learning_rate": 0.00018415758591785414,
      "loss": 0.1882,
      "step": 190
    },
    {
      "epoch": 0.08172673931265717,
      "grad_norm": 0.27891814708709717,
      "learning_rate": 0.00018373847443419952,
      "loss": 0.1879,
      "step": 195
    },
    {
      "epoch": 0.08382229673093043,
      "grad_norm": 0.4772103428840637,
      "learning_rate": 0.00018331936295054485,
      "loss": 0.2069,
      "step": 200
    },
    {
      "epoch": 0.08591785414920369,
      "grad_norm": 2.367286205291748,
      "learning_rate": 0.0001829002514668902,
      "loss": 0.2043,
      "step": 205
    },
    {
      "epoch": 0.08801341156747695,
      "grad_norm": 0.30191686749458313,
      "learning_rate": 0.00018248113998323555,
      "loss": 0.1978,
      "step": 210
    },
    {
      "epoch": 0.09010896898575022,
      "grad_norm": 0.22913698852062225,
      "learning_rate": 0.0001820620284995809,
      "loss": 0.1937,
      "step": 215
    },
    {
      "epoch": 0.09220452640402348,
      "grad_norm": 0.26689326763153076,
      "learning_rate": 0.00018164291701592623,
      "loss": 0.1892,
      "step": 220
    },
    {
      "epoch": 0.09430008382229674,
      "grad_norm": 0.26441484689712524,
      "learning_rate": 0.00018122380553227158,
      "loss": 0.1924,
      "step": 225
    },
    {
      "epoch": 0.09639564124056998,
      "grad_norm": 0.2602805197238922,
      "learning_rate": 0.00018080469404861696,
      "loss": 0.1856,
      "step": 230
    },
    {
      "epoch": 0.09849119865884325,
      "grad_norm": 0.21042250096797943,
      "learning_rate": 0.0001803855825649623,
      "loss": 0.1848,
      "step": 235
    },
    {
      "epoch": 0.10058675607711651,
      "grad_norm": 1.8752623796463013,
      "learning_rate": 0.00017996647108130763,
      "loss": 0.1822,
      "step": 240
    },
    {
      "epoch": 0.10268231349538977,
      "grad_norm": 0.3879601061344147,
      "learning_rate": 0.00017954735959765299,
      "loss": 0.1979,
      "step": 245
    },
    {
      "epoch": 0.10477787091366303,
      "grad_norm": 0.27347472310066223,
      "learning_rate": 0.00017912824811399834,
      "loss": 0.1903,
      "step": 250
    },
    {
      "epoch": 0.10687342833193629,
      "grad_norm": 0.24279960989952087,
      "learning_rate": 0.0001787091366303437,
      "loss": 0.1848,
      "step": 255
    },
    {
      "epoch": 0.10896898575020955,
      "grad_norm": 0.275285005569458,
      "learning_rate": 0.00017829002514668901,
      "loss": 0.1848,
      "step": 260
    },
    {
      "epoch": 0.11106454316848281,
      "grad_norm": 0.33457309007644653,
      "learning_rate": 0.00017787091366303437,
      "loss": 0.1851,
      "step": 265
    },
    {
      "epoch": 0.11316010058675607,
      "grad_norm": 0.5345426797866821,
      "learning_rate": 0.00017745180217937974,
      "loss": 0.189,
      "step": 270
    },
    {
      "epoch": 0.11525565800502934,
      "grad_norm": 0.3533399701118469,
      "learning_rate": 0.00017703269069572507,
      "loss": 0.1835,
      "step": 275
    },
    {
      "epoch": 0.1173512154233026,
      "grad_norm": 0.3257920742034912,
      "learning_rate": 0.00017661357921207042,
      "loss": 0.1892,
      "step": 280
    },
    {
      "epoch": 0.11944677284157586,
      "grad_norm": 0.21177901327610016,
      "learning_rate": 0.00017619446772841577,
      "loss": 0.1818,
      "step": 285
    },
    {
      "epoch": 0.12154233025984912,
      "grad_norm": 0.20798452198505402,
      "learning_rate": 0.00017577535624476112,
      "loss": 0.1792,
      "step": 290
    },
    {
      "epoch": 0.12363788767812238,
      "grad_norm": 0.21963848173618317,
      "learning_rate": 0.00017535624476110645,
      "loss": 0.1776,
      "step": 295
    },
    {
      "epoch": 0.12573344509639564,
      "grad_norm": 0.22277575731277466,
      "learning_rate": 0.0001749371332774518,
      "loss": 0.178,
      "step": 300
    },
    {
      "epoch": 0.12782900251466892,
      "grad_norm": 0.22803856432437897,
      "learning_rate": 0.00017451802179379715,
      "loss": 0.1732,
      "step": 305
    },
    {
      "epoch": 0.12992455993294216,
      "grad_norm": 0.20474207401275635,
      "learning_rate": 0.0001740989103101425,
      "loss": 0.173,
      "step": 310
    },
    {
      "epoch": 0.1320201173512154,
      "grad_norm": 0.23283089697360992,
      "learning_rate": 0.00017367979882648786,
      "loss": 0.1752,
      "step": 315
    },
    {
      "epoch": 0.13411567476948869,
      "grad_norm": 0.24834850430488586,
      "learning_rate": 0.0001732606873428332,
      "loss": 0.1727,
      "step": 320
    },
    {
      "epoch": 0.13621123218776193,
      "grad_norm": 0.2943226993083954,
      "learning_rate": 0.00017284157585917856,
      "loss": 0.1758,
      "step": 325
    },
    {
      "epoch": 0.1383067896060352,
      "grad_norm": 0.28577786684036255,
      "learning_rate": 0.00017242246437552388,
      "loss": 0.1835,
      "step": 330
    },
    {
      "epoch": 0.14040234702430845,
      "grad_norm": 0.22449374198913574,
      "learning_rate": 0.00017200335289186924,
      "loss": 0.1795,
      "step": 335
    },
    {
      "epoch": 0.14249790444258173,
      "grad_norm": 0.3676876425743103,
      "learning_rate": 0.0001715842414082146,
      "loss": 0.1771,
      "step": 340
    },
    {
      "epoch": 0.14459346186085498,
      "grad_norm": 0.4172014594078064,
      "learning_rate": 0.00017116512992455994,
      "loss": 0.197,
      "step": 345
    },
    {
      "epoch": 0.14668901927912825,
      "grad_norm": 0.26507022976875305,
      "learning_rate": 0.0001707460184409053,
      "loss": 0.181,
      "step": 350
    },
    {
      "epoch": 0.1487845766974015,
      "grad_norm": 0.2308947890996933,
      "learning_rate": 0.00017032690695725064,
      "loss": 0.1767,
      "step": 355
    },
    {
      "epoch": 0.15088013411567477,
      "grad_norm": 0.30771297216415405,
      "learning_rate": 0.000169907795473596,
      "loss": 0.1832,
      "step": 360
    },
    {
      "epoch": 0.15297569153394802,
      "grad_norm": 0.23101018369197845,
      "learning_rate": 0.00016948868398994132,
      "loss": 0.1811,
      "step": 365
    },
    {
      "epoch": 0.1550712489522213,
      "grad_norm": 0.20816421508789062,
      "learning_rate": 0.00016906957250628667,
      "loss": 0.1755,
      "step": 370
    },
    {
      "epoch": 0.15716680637049454,
      "grad_norm": 0.2107459455728531,
      "learning_rate": 0.00016865046102263202,
      "loss": 0.1726,
      "step": 375
    },
    {
      "epoch": 0.15926236378876782,
      "grad_norm": 2.224956750869751,
      "learning_rate": 0.00016823134953897737,
      "loss": 0.1718,
      "step": 380
    },
    {
      "epoch": 0.16135792120704107,
      "grad_norm": 0.22022977471351624,
      "learning_rate": 0.00016781223805532273,
      "loss": 0.1725,
      "step": 385
    },
    {
      "epoch": 0.16345347862531434,
      "grad_norm": 0.1845213919878006,
      "learning_rate": 0.00016739312657166808,
      "loss": 0.1677,
      "step": 390
    },
    {
      "epoch": 0.1655490360435876,
      "grad_norm": 0.21298770606517792,
      "learning_rate": 0.00016697401508801343,
      "loss": 0.1734,
      "step": 395
    },
    {
      "epoch": 0.16764459346186086,
      "grad_norm": 0.20876356959342957,
      "learning_rate": 0.00016655490360435878,
      "loss": 0.1761,
      "step": 400
    },
    {
      "epoch": 0.1697401508801341,
      "grad_norm": 0.21667592227458954,
      "learning_rate": 0.0001661357921207041,
      "loss": 0.1668,
      "step": 405
    },
    {
      "epoch": 0.17183570829840739,
      "grad_norm": 2.4301416873931885,
      "learning_rate": 0.00016571668063704946,
      "loss": 0.1697,
      "step": 410
    },
    {
      "epoch": 0.17393126571668063,
      "grad_norm": 0.43861570954322815,
      "learning_rate": 0.0001652975691533948,
      "loss": 0.1804,
      "step": 415
    },
    {
      "epoch": 0.1760268231349539,
      "grad_norm": 0.2521713078022003,
      "learning_rate": 0.00016487845766974016,
      "loss": 0.1762,
      "step": 420
    },
    {
      "epoch": 0.17812238055322716,
      "grad_norm": 0.20923659205436707,
      "learning_rate": 0.00016445934618608551,
      "loss": 0.1695,
      "step": 425
    },
    {
      "epoch": 0.18021793797150043,
      "grad_norm": 0.20591603219509125,
      "learning_rate": 0.00016404023470243087,
      "loss": 0.1729,
      "step": 430
    },
    {
      "epoch": 0.18231349538977368,
      "grad_norm": 2.5188469886779785,
      "learning_rate": 0.00016362112321877622,
      "loss": 0.1723,
      "step": 435
    },
    {
      "epoch": 0.18440905280804695,
      "grad_norm": 0.2523597478866577,
      "learning_rate": 0.00016320201173512154,
      "loss": 0.174,
      "step": 440
    },
    {
      "epoch": 0.1865046102263202,
      "grad_norm": 0.23274292051792145,
      "learning_rate": 0.0001627829002514669,
      "loss": 0.1707,
      "step": 445
    },
    {
      "epoch": 0.18860016764459347,
      "grad_norm": 0.26748543977737427,
      "learning_rate": 0.00016236378876781225,
      "loss": 0.1686,
      "step": 450
    },
    {
      "epoch": 0.19069572506286672,
      "grad_norm": 0.2876422107219696,
      "learning_rate": 0.0001619446772841576,
      "loss": 0.1723,
      "step": 455
    },
    {
      "epoch": 0.19279128248113997,
      "grad_norm": 0.5347093939781189,
      "learning_rate": 0.00016152556580050292,
      "loss": 0.1873,
      "step": 460
    },
    {
      "epoch": 0.19488683989941324,
      "grad_norm": 0.2463475912809372,
      "learning_rate": 0.0001611064543168483,
      "loss": 0.1817,
      "step": 465
    },
    {
      "epoch": 0.1969823973176865,
      "grad_norm": 0.21807396411895752,
      "learning_rate": 0.00016068734283319365,
      "loss": 0.1751,
      "step": 470
    },
    {
      "epoch": 0.19907795473595977,
      "grad_norm": 0.1950008124113083,
      "learning_rate": 0.00016026823134953898,
      "loss": 0.1723,
      "step": 475
    },
    {
      "epoch": 0.20117351215423301,
      "grad_norm": 0.19139733910560608,
      "learning_rate": 0.00015984911986588433,
      "loss": 0.168,
      "step": 480
    },
    {
      "epoch": 0.2032690695725063,
      "grad_norm": 0.24496974050998688,
      "learning_rate": 0.00015943000838222968,
      "loss": 0.1713,
      "step": 485
    },
    {
      "epoch": 0.20536462699077954,
      "grad_norm": 0.2883533239364624,
      "learning_rate": 0.00015901089689857503,
      "loss": 0.1758,
      "step": 490
    },
    {
      "epoch": 0.2074601844090528,
      "grad_norm": 0.2685905694961548,
      "learning_rate": 0.00015859178541492036,
      "loss": 0.1789,
      "step": 495
    },
    {
      "epoch": 0.20955574182732606,
      "grad_norm": 0.22861076891422272,
      "learning_rate": 0.0001581726739312657,
      "loss": 0.1751,
      "step": 500
    },
    {
      "epoch": 0.21165129924559933,
      "grad_norm": 0.4216708242893219,
      "learning_rate": 0.0001577535624476111,
      "loss": 0.1765,
      "step": 505
    },
    {
      "epoch": 0.21374685666387258,
      "grad_norm": 0.33128198981285095,
      "learning_rate": 0.0001573344509639564,
      "loss": 0.1908,
      "step": 510
    },
    {
      "epoch": 0.21584241408214586,
      "grad_norm": 0.21954227983951569,
      "learning_rate": 0.00015691533948030176,
      "loss": 0.1707,
      "step": 515
    },
    {
      "epoch": 0.2179379715004191,
      "grad_norm": 0.22218674421310425,
      "learning_rate": 0.00015649622799664712,
      "loss": 0.1695,
      "step": 520
    },
    {
      "epoch": 0.22003352891869238,
      "grad_norm": 0.23518136143684387,
      "learning_rate": 0.00015607711651299247,
      "loss": 0.1677,
      "step": 525
    },
    {
      "epoch": 0.22212908633696563,
      "grad_norm": 0.4071219861507416,
      "learning_rate": 0.0001556580050293378,
      "loss": 0.1753,
      "step": 530
    },
    {
      "epoch": 0.2242246437552389,
      "grad_norm": 0.25883597135543823,
      "learning_rate": 0.00015523889354568314,
      "loss": 0.1741,
      "step": 535
    },
    {
      "epoch": 0.22632020117351215,
      "grad_norm": 0.19170518219470978,
      "learning_rate": 0.00015481978206202852,
      "loss": 0.17,
      "step": 540
    },
    {
      "epoch": 0.22841575859178542,
      "grad_norm": 0.18559418618679047,
      "learning_rate": 0.00015440067057837387,
      "loss": 0.1701,
      "step": 545
    },
    {
      "epoch": 0.23051131601005867,
      "grad_norm": 0.2035888284444809,
      "learning_rate": 0.0001539815590947192,
      "loss": 0.1623,
      "step": 550
    },
    {
      "epoch": 0.23260687342833195,
      "grad_norm": 7.4185709953308105,
      "learning_rate": 0.00015356244761106455,
      "loss": 0.1654,
      "step": 555
    },
    {
      "epoch": 0.2347024308466052,
      "grad_norm": 0.21483619511127472,
      "learning_rate": 0.0001531433361274099,
      "loss": 0.1664,
      "step": 560
    },
    {
      "epoch": 0.23679798826487847,
      "grad_norm": 0.2908990979194641,
      "learning_rate": 0.00015272422464375525,
      "loss": 0.1733,
      "step": 565
    },
    {
      "epoch": 0.23889354568315171,
      "grad_norm": 0.34150460362434387,
      "learning_rate": 0.00015230511316010058,
      "loss": 0.1721,
      "step": 570
    },
    {
      "epoch": 0.240989103101425,
      "grad_norm": 0.26667365431785583,
      "learning_rate": 0.00015188600167644593,
      "loss": 0.1778,
      "step": 575
    },
    {
      "epoch": 0.24308466051969824,
      "grad_norm": 0.2024029940366745,
      "learning_rate": 0.0001514668901927913,
      "loss": 0.1681,
      "step": 580
    },
    {
      "epoch": 0.2451802179379715,
      "grad_norm": 0.1918814480304718,
      "learning_rate": 0.00015104777870913663,
      "loss": 0.1672,
      "step": 585
    },
    {
      "epoch": 0.24727577535624476,
      "grad_norm": 0.22173915803432465,
      "learning_rate": 0.00015062866722548199,
      "loss": 0.1629,
      "step": 590
    },
    {
      "epoch": 0.24937133277451803,
      "grad_norm": 0.20861753821372986,
      "learning_rate": 0.00015020955574182734,
      "loss": 0.1695,
      "step": 595
    },
    {
      "epoch": 0.2514668901927913,
      "grad_norm": 0.2476891726255417,
      "learning_rate": 0.0001497904442581727,
      "loss": 0.1687,
      "step": 600
    },
    {
      "epoch": 0.25356244761106456,
      "grad_norm": 0.23498353362083435,
      "learning_rate": 0.00014937133277451801,
      "loss": 0.1721,
      "step": 605
    },
    {
      "epoch": 0.25565800502933783,
      "grad_norm": 0.24014067649841309,
      "learning_rate": 0.00014895222129086337,
      "loss": 0.1643,
      "step": 610
    },
    {
      "epoch": 0.25775356244761105,
      "grad_norm": 1.1056888103485107,
      "learning_rate": 0.00014853310980720872,
      "loss": 0.1652,
      "step": 615
    },
    {
      "epoch": 0.2598491198658843,
      "grad_norm": 0.3467954397201538,
      "learning_rate": 0.00014811399832355407,
      "loss": 0.1741,
      "step": 620
    },
    {
      "epoch": 0.2619446772841576,
      "grad_norm": 0.29533931612968445,
      "learning_rate": 0.00014769488683989942,
      "loss": 0.1768,
      "step": 625
    },
    {
      "epoch": 0.2640402347024308,
      "grad_norm": 0.2000960111618042,
      "learning_rate": 0.00014727577535624477,
      "loss": 0.1662,
      "step": 630
    },
    {
      "epoch": 0.2661357921207041,
      "grad_norm": 0.18396534025669098,
      "learning_rate": 0.00014685666387259013,
      "loss": 0.166,
      "step": 635
    },
    {
      "epoch": 0.26823134953897737,
      "grad_norm": 0.18797029554843903,
      "learning_rate": 0.00014643755238893545,
      "loss": 0.1624,
      "step": 640
    },
    {
      "epoch": 0.27032690695725065,
      "grad_norm": 0.1914837509393692,
      "learning_rate": 0.0001460184409052808,
      "loss": 0.1625,
      "step": 645
    },
    {
      "epoch": 0.27242246437552387,
      "grad_norm": 0.18006779253482819,
      "learning_rate": 0.00014559932942162615,
      "loss": 0.1602,
      "step": 650
    },
    {
      "epoch": 0.27451802179379714,
      "grad_norm": 0.19413350522518158,
      "learning_rate": 0.0001451802179379715,
      "loss": 0.1632,
      "step": 655
    },
    {
      "epoch": 0.2766135792120704,
      "grad_norm": 0.19674436748027802,
      "learning_rate": 0.00014476110645431686,
      "loss": 0.1593,
      "step": 660
    },
    {
      "epoch": 0.2787091366303437,
      "grad_norm": 0.16716861724853516,
      "learning_rate": 0.0001443419949706622,
      "loss": 0.161,
      "step": 665
    },
    {
      "epoch": 0.2808046940486169,
      "grad_norm": 0.1591351330280304,
      "learning_rate": 0.00014392288348700756,
      "loss": 0.163,
      "step": 670
    },
    {
      "epoch": 0.2829002514668902,
      "grad_norm": 0.16973218321800232,
      "learning_rate": 0.0001435037720033529,
      "loss": 0.1665,
      "step": 675
    },
    {
      "epoch": 0.28499580888516346,
      "grad_norm": 0.16811800003051758,
      "learning_rate": 0.00014308466051969824,
      "loss": 0.1586,
      "step": 680
    },
    {
      "epoch": 0.28709136630343673,
      "grad_norm": 0.21386906504631042,
      "learning_rate": 0.0001426655490360436,
      "loss": 0.1589,
      "step": 685
    },
    {
      "epoch": 0.28918692372170995,
      "grad_norm": 0.17704640328884125,
      "learning_rate": 0.00014224643755238894,
      "loss": 0.1597,
      "step": 690
    },
    {
      "epoch": 0.29128248113998323,
      "grad_norm": 0.1727607399225235,
      "learning_rate": 0.0001418273260687343,
      "loss": 0.1565,
      "step": 695
    },
    {
      "epoch": 0.2933780385582565,
      "grad_norm": 0.18027518689632416,
      "learning_rate": 0.00014140821458507964,
      "loss": 0.1597,
      "step": 700
    },
    {
      "epoch": 0.2954735959765298,
      "grad_norm": 0.18142397701740265,
      "learning_rate": 0.000140989103101425,
      "loss": 0.1618,
      "step": 705
    },
    {
      "epoch": 0.297569153394803,
      "grad_norm": 0.18902671337127686,
      "learning_rate": 0.00014056999161777035,
      "loss": 0.1606,
      "step": 710
    },
    {
      "epoch": 0.2996647108130763,
      "grad_norm": 0.2578323185443878,
      "learning_rate": 0.00014015088013411567,
      "loss": 0.1658,
      "step": 715
    },
    {
      "epoch": 0.30176026823134955,
      "grad_norm": 0.2432331144809723,
      "learning_rate": 0.00013973176865046102,
      "loss": 0.1694,
      "step": 720
    },
    {
      "epoch": 0.3038558256496228,
      "grad_norm": 0.1846640259027481,
      "learning_rate": 0.00013931265716680638,
      "loss": 0.1655,
      "step": 725
    },
    {
      "epoch": 0.30595138306789604,
      "grad_norm": 0.1582545042037964,
      "learning_rate": 0.00013889354568315173,
      "loss": 0.1639,
      "step": 730
    },
    {
      "epoch": 0.3080469404861693,
      "grad_norm": 0.16749900579452515,
      "learning_rate": 0.00013847443419949708,
      "loss": 0.1596,
      "step": 735
    },
    {
      "epoch": 0.3101424979044426,
      "grad_norm": 0.17118974030017853,
      "learning_rate": 0.00013805532271584243,
      "loss": 0.1569,
      "step": 740
    },
    {
      "epoch": 0.31223805532271587,
      "grad_norm": 0.16258081793785095,
      "learning_rate": 0.00013763621123218778,
      "loss": 0.1599,
      "step": 745
    },
    {
      "epoch": 0.3143336127409891,
      "grad_norm": 0.17213059961795807,
      "learning_rate": 0.0001372170997485331,
      "loss": 0.1588,
      "step": 750
    },
    {
      "epoch": 0.31642917015926236,
      "grad_norm": 0.18207783997058868,
      "learning_rate": 0.00013679798826487846,
      "loss": 0.1571,
      "step": 755
    },
    {
      "epoch": 0.31852472757753564,
      "grad_norm": 0.19723886251449585,
      "learning_rate": 0.0001363788767812238,
      "loss": 0.1552,
      "step": 760
    },
    {
      "epoch": 0.32062028499580886,
      "grad_norm": 0.17248809337615967,
      "learning_rate": 0.00013595976529756916,
      "loss": 0.1564,
      "step": 765
    },
    {
      "epoch": 0.32271584241408213,
      "grad_norm": 0.18213661015033722,
      "learning_rate": 0.0001355406538139145,
      "loss": 0.1597,
      "step": 770
    },
    {
      "epoch": 0.3248113998323554,
      "grad_norm": 0.1856526881456375,
      "learning_rate": 0.00013512154233025987,
      "loss": 0.1609,
      "step": 775
    },
    {
      "epoch": 0.3269069572506287,
      "grad_norm": 0.16957145929336548,
      "learning_rate": 0.00013470243084660522,
      "loss": 0.1595,
      "step": 780
    },
    {
      "epoch": 0.3290025146689019,
      "grad_norm": 0.17088639736175537,
      "learning_rate": 0.00013428331936295054,
      "loss": 0.161,
      "step": 785
    },
    {
      "epoch": 0.3310980720871752,
      "grad_norm": 1.320410132408142,
      "learning_rate": 0.0001338642078792959,
      "loss": 0.165,
      "step": 790
    },
    {
      "epoch": 0.33319362950544845,
      "grad_norm": 0.19440437853336334,
      "learning_rate": 0.00013344509639564125,
      "loss": 0.1602,
      "step": 795
    },
    {
      "epoch": 0.3352891869237217,
      "grad_norm": 0.19590894877910614,
      "learning_rate": 0.0001330259849119866,
      "loss": 0.1616,
      "step": 800
    },
    {
      "epoch": 0.33738474434199495,
      "grad_norm": 0.19055521488189697,
      "learning_rate": 0.00013260687342833192,
      "loss": 0.1638,
      "step": 805
    },
    {
      "epoch": 0.3394803017602682,
      "grad_norm": 0.1577232927083969,
      "learning_rate": 0.00013218776194467727,
      "loss": 0.1582,
      "step": 810
    },
    {
      "epoch": 0.3415758591785415,
      "grad_norm": 0.16225744783878326,
      "learning_rate": 0.00013176865046102265,
      "loss": 0.1555,
      "step": 815
    },
    {
      "epoch": 0.34367141659681477,
      "grad_norm": 0.1565002202987671,
      "learning_rate": 0.000131349538977368,
      "loss": 0.1567,
      "step": 820
    },
    {
      "epoch": 0.345766974015088,
      "grad_norm": 0.18790805339813232,
      "learning_rate": 0.00013093042749371333,
      "loss": 0.1581,
      "step": 825
    },
    {
      "epoch": 0.34786253143336127,
      "grad_norm": 0.18632063269615173,
      "learning_rate": 0.00013051131601005868,
      "loss": 0.1621,
      "step": 830
    },
    {
      "epoch": 0.34995808885163454,
      "grad_norm": 0.17684835195541382,
      "learning_rate": 0.00013009220452640403,
      "loss": 0.1591,
      "step": 835
    },
    {
      "epoch": 0.3520536462699078,
      "grad_norm": 0.16213147342205048,
      "learning_rate": 0.00012967309304274938,
      "loss": 0.1593,
      "step": 840
    },
    {
      "epoch": 0.35414920368818104,
      "grad_norm": 0.1656450480222702,
      "learning_rate": 0.0001292539815590947,
      "loss": 0.1617,
      "step": 845
    },
    {
      "epoch": 0.3562447611064543,
      "grad_norm": 0.16887331008911133,
      "learning_rate": 0.0001288348700754401,
      "loss": 0.1601,
      "step": 850
    },
    {
      "epoch": 0.3583403185247276,
      "grad_norm": 0.17660242319107056,
      "learning_rate": 0.00012841575859178544,
      "loss": 0.1575,
      "step": 855
    },
    {
      "epoch": 0.36043587594300086,
      "grad_norm": 0.17625996470451355,
      "learning_rate": 0.00012799664710813076,
      "loss": 0.1597,
      "step": 860
    },
    {
      "epoch": 0.3625314333612741,
      "grad_norm": 0.15301348268985748,
      "learning_rate": 0.00012757753562447612,
      "loss": 0.1597,
      "step": 865
    },
    {
      "epoch": 0.36462699077954736,
      "grad_norm": 0.15253449976444244,
      "learning_rate": 0.00012715842414082147,
      "loss": 0.157,
      "step": 870
    },
    {
      "epoch": 0.36672254819782063,
      "grad_norm": 0.16985070705413818,
      "learning_rate": 0.00012673931265716682,
      "loss": 0.1558,
      "step": 875
    },
    {
      "epoch": 0.3688181056160939,
      "grad_norm": 0.1545540988445282,
      "learning_rate": 0.00012632020117351214,
      "loss": 0.1596,
      "step": 880
    },
    {
      "epoch": 0.3709136630343671,
      "grad_norm": 0.1766255795955658,
      "learning_rate": 0.0001259010896898575,
      "loss": 0.1537,
      "step": 885
    },
    {
      "epoch": 0.3730092204526404,
      "grad_norm": 0.15094798803329468,
      "learning_rate": 0.00012548197820620288,
      "loss": 0.1562,
      "step": 890
    },
    {
      "epoch": 0.3751047778709137,
      "grad_norm": 0.16060136258602142,
      "learning_rate": 0.0001250628667225482,
      "loss": 0.158,
      "step": 895
    },
    {
      "epoch": 0.37720033528918695,
      "grad_norm": 0.17947901785373688,
      "learning_rate": 0.00012464375523889355,
      "loss": 0.1583,
      "step": 900
    },
    {
      "epoch": 0.37929589270746017,
      "grad_norm": 0.15727129578590393,
      "learning_rate": 0.0001242246437552389,
      "loss": 0.1529,
      "step": 905
    },
    {
      "epoch": 0.38139145012573344,
      "grad_norm": 0.14456555247306824,
      "learning_rate": 0.00012380553227158426,
      "loss": 0.1573,
      "step": 910
    },
    {
      "epoch": 0.3834870075440067,
      "grad_norm": 0.15629969537258148,
      "learning_rate": 0.00012338642078792958,
      "loss": 0.1607,
      "step": 915
    },
    {
      "epoch": 0.38558256496227994,
      "grad_norm": 0.1958230435848236,
      "learning_rate": 0.00012296730930427493,
      "loss": 0.1572,
      "step": 920
    },
    {
      "epoch": 0.3876781223805532,
      "grad_norm": 0.18159349262714386,
      "learning_rate": 0.00012254819782062028,
      "loss": 0.1565,
      "step": 925
    },
    {
      "epoch": 0.3897736797988265,
      "grad_norm": 0.19391848146915436,
      "learning_rate": 0.00012212908633696564,
      "loss": 0.1552,
      "step": 930
    },
    {
      "epoch": 0.39186923721709976,
      "grad_norm": 0.18596895039081573,
      "learning_rate": 0.00012170997485331099,
      "loss": 0.1623,
      "step": 935
    },
    {
      "epoch": 0.393964794635373,
      "grad_norm": 0.175604447722435,
      "learning_rate": 0.00012129086336965634,
      "loss": 0.1593,
      "step": 940
    },
    {
      "epoch": 0.39606035205364626,
      "grad_norm": 0.18805819749832153,
      "learning_rate": 0.00012087175188600168,
      "loss": 0.1603,
      "step": 945
    },
    {
      "epoch": 0.39815590947191953,
      "grad_norm": 0.15223102271556854,
      "learning_rate": 0.00012045264040234703,
      "loss": 0.1568,
      "step": 950
    },
    {
      "epoch": 0.4002514668901928,
      "grad_norm": 0.13771827518939972,
      "learning_rate": 0.00012003352891869237,
      "loss": 0.1553,
      "step": 955
    },
    {
      "epoch": 0.40234702430846603,
      "grad_norm": 0.1633366495370865,
      "learning_rate": 0.00011961441743503772,
      "loss": 0.156,
      "step": 960
    },
    {
      "epoch": 0.4044425817267393,
      "grad_norm": 0.17143379151821136,
      "learning_rate": 0.00011919530595138306,
      "loss": 0.1542,
      "step": 965
    },
    {
      "epoch": 0.4065381391450126,
      "grad_norm": 0.16767437756061554,
      "learning_rate": 0.00011877619446772844,
      "loss": 0.1572,
      "step": 970
    },
    {
      "epoch": 0.40863369656328585,
      "grad_norm": 0.16299773752689362,
      "learning_rate": 0.00011835708298407377,
      "loss": 0.1583,
      "step": 975
    },
    {
      "epoch": 0.4107292539815591,
      "grad_norm": 0.15164397656917572,
      "learning_rate": 0.00011793797150041913,
      "loss": 0.1526,
      "step": 980
    },
    {
      "epoch": 0.41282481139983235,
      "grad_norm": 0.1567896157503128,
      "learning_rate": 0.00011751886001676446,
      "loss": 0.154,
      "step": 985
    },
    {
      "epoch": 0.4149203688181056,
      "grad_norm": 0.15943744778633118,
      "learning_rate": 0.00011709974853310982,
      "loss": 0.1533,
      "step": 990
    },
    {
      "epoch": 0.4170159262363789,
      "grad_norm": 0.16572755575180054,
      "learning_rate": 0.00011668063704945515,
      "loss": 0.1559,
      "step": 995
    },
    {
      "epoch": 0.4191114836546521,
      "grad_norm": 0.15907670557498932,
      "learning_rate": 0.0001162615255658005,
      "loss": 0.1558,
      "step": 1000
    },
    {
      "epoch": 0.4212070410729254,
      "grad_norm": 0.1808643937110901,
      "learning_rate": 0.00011584241408214587,
      "loss": 0.1555,
      "step": 1005
    },
    {
      "epoch": 0.42330259849119867,
      "grad_norm": 0.23819687962532043,
      "learning_rate": 0.00011542330259849121,
      "loss": 0.1579,
      "step": 1010
    },
    {
      "epoch": 0.42539815590947194,
      "grad_norm": 0.24894015491008759,
      "learning_rate": 0.00011500419111483656,
      "loss": 0.1637,
      "step": 1015
    },
    {
      "epoch": 0.42749371332774516,
      "grad_norm": 0.18128713965415955,
      "learning_rate": 0.0001145850796311819,
      "loss": 0.1605,
      "step": 1020
    },
    {
      "epoch": 0.42958927074601844,
      "grad_norm": 0.1779192090034485,
      "learning_rate": 0.00011416596814752725,
      "loss": 0.1586,
      "step": 1025
    },
    {
      "epoch": 0.4316848281642917,
      "grad_norm": 0.1617233157157898,
      "learning_rate": 0.00011374685666387259,
      "loss": 0.1548,
      "step": 1030
    },
    {
      "epoch": 0.433780385582565,
      "grad_norm": 0.146457701921463,
      "learning_rate": 0.00011332774518021794,
      "loss": 0.1581,
      "step": 1035
    },
    {
      "epoch": 0.4358759430008382,
      "grad_norm": 18.378135681152344,
      "learning_rate": 0.00011290863369656328,
      "loss": 0.1546,
      "step": 1040
    },
    {
      "epoch": 0.4379715004191115,
      "grad_norm": 0.16601739823818207,
      "learning_rate": 0.00011248952221290864,
      "loss": 0.1573,
      "step": 1045
    },
    {
      "epoch": 0.44006705783738476,
      "grad_norm": 0.18232333660125732,
      "learning_rate": 0.000112070410729254,
      "loss": 0.1546,
      "step": 1050
    },
    {
      "epoch": 0.44216261525565803,
      "grad_norm": 0.15297749638557434,
      "learning_rate": 0.00011165129924559933,
      "loss": 0.1561,
      "step": 1055
    },
    {
      "epoch": 0.44425817267393125,
      "grad_norm": 7.5409440994262695,
      "learning_rate": 0.00011123218776194469,
      "loss": 0.1614,
      "step": 1060
    },
    {
      "epoch": 0.4463537300922045,
      "grad_norm": 0.1807660162448883,
      "learning_rate": 0.00011081307627829002,
      "loss": 0.1624,
      "step": 1065
    },
    {
      "epoch": 0.4484492875104778,
      "grad_norm": 0.18783360719680786,
      "learning_rate": 0.00011039396479463538,
      "loss": 0.1609,
      "step": 1070
    },
    {
      "epoch": 0.450544844928751,
      "grad_norm": 0.23045672476291656,
      "learning_rate": 0.00010997485331098071,
      "loss": 0.1618,
      "step": 1075
    },
    {
      "epoch": 0.4526404023470243,
      "grad_norm": 0.2051040381193161,
      "learning_rate": 0.00010955574182732607,
      "loss": 0.164,
      "step": 1080
    },
    {
      "epoch": 0.45473595976529757,
      "grad_norm": 0.17596812546253204,
      "learning_rate": 0.00010913663034367143,
      "loss": 0.1578,
      "step": 1085
    },
    {
      "epoch": 0.45683151718357085,
      "grad_norm": 0.1564697027206421,
      "learning_rate": 0.00010871751886001677,
      "loss": 0.1549,
      "step": 1090
    },
    {
      "epoch": 0.45892707460184406,
      "grad_norm": 0.2508351802825928,
      "learning_rate": 0.00010829840737636212,
      "loss": 0.1541,
      "step": 1095
    },
    {
      "epoch": 0.46102263202011734,
      "grad_norm": 0.17002500593662262,
      "learning_rate": 0.00010787929589270746,
      "loss": 0.1606,
      "step": 1100
    },
    {
      "epoch": 0.4631181894383906,
      "grad_norm": 0.1768285632133484,
      "learning_rate": 0.00010746018440905281,
      "loss": 0.1533,
      "step": 1105
    },
    {
      "epoch": 0.4652137468566639,
      "grad_norm": 0.21098843216896057,
      "learning_rate": 0.00010704107292539815,
      "loss": 0.1598,
      "step": 1110
    },
    {
      "epoch": 0.4673093042749371,
      "grad_norm": 0.1743684709072113,
      "learning_rate": 0.0001066219614417435,
      "loss": 0.1577,
      "step": 1115
    },
    {
      "epoch": 0.4694048616932104,
      "grad_norm": 0.18270978331565857,
      "learning_rate": 0.00010620284995808884,
      "loss": 0.1533,
      "step": 1120
    },
    {
      "epoch": 0.47150041911148366,
      "grad_norm": 0.19265097379684448,
      "learning_rate": 0.00010578373847443422,
      "loss": 0.1545,
      "step": 1125
    },
    {
      "epoch": 0.47359597652975693,
      "grad_norm": 0.19420358538627625,
      "learning_rate": 0.00010536462699077956,
      "loss": 0.1587,
      "step": 1130
    },
    {
      "epoch": 0.47569153394803015,
      "grad_norm": 0.1721310317516327,
      "learning_rate": 0.00010494551550712491,
      "loss": 0.1577,
      "step": 1135
    },
    {
      "epoch": 0.47778709136630343,
      "grad_norm": 0.1586717814207077,
      "learning_rate": 0.00010452640402347025,
      "loss": 0.1551,
      "step": 1140
    },
    {
      "epoch": 0.4798826487845767,
      "grad_norm": 1.5393892526626587,
      "learning_rate": 0.0001041072925398156,
      "loss": 0.1537,
      "step": 1145
    },
    {
      "epoch": 0.48197820620285,
      "grad_norm": 0.17265351116657257,
      "learning_rate": 0.00010368818105616094,
      "loss": 0.1514,
      "step": 1150
    },
    {
      "epoch": 0.4840737636211232,
      "grad_norm": 0.15501521527767181,
      "learning_rate": 0.00010326906957250629,
      "loss": 0.1559,
      "step": 1155
    },
    {
      "epoch": 0.4861693210393965,
      "grad_norm": 0.1425572782754898,
      "learning_rate": 0.00010284995808885165,
      "loss": 0.1532,
      "step": 1160
    },
    {
      "epoch": 0.48826487845766975,
      "grad_norm": 0.1547863483428955,
      "learning_rate": 0.00010243084660519699,
      "loss": 0.1557,
      "step": 1165
    },
    {
      "epoch": 0.490360435875943,
      "grad_norm": 0.1579703986644745,
      "learning_rate": 0.00010201173512154234,
      "loss": 0.1568,
      "step": 1170
    },
    {
      "epoch": 0.49245599329421624,
      "grad_norm": 4.8748297691345215,
      "learning_rate": 0.00010159262363788768,
      "loss": 0.1542,
      "step": 1175
    },
    {
      "epoch": 0.4945515507124895,
      "grad_norm": 0.16955487430095673,
      "learning_rate": 0.00010117351215423303,
      "loss": 0.1573,
      "step": 1180
    },
    {
      "epoch": 0.4966471081307628,
      "grad_norm": 0.17641142010688782,
      "learning_rate": 0.00010075440067057837,
      "loss": 0.1563,
      "step": 1185
    },
    {
      "epoch": 0.49874266554903607,
      "grad_norm": 0.19048817455768585,
      "learning_rate": 0.00010033528918692372,
      "loss": 0.1565,
      "step": 1190
    },
    {
      "epoch": 0.5008382229673093,
      "grad_norm": 0.1500770002603531,
      "learning_rate": 9.991617770326908e-05,
      "loss": 0.1561,
      "step": 1195
    },
    {
      "epoch": 0.5029337803855826,
      "grad_norm": 0.1577143371105194,
      "learning_rate": 9.949706621961443e-05,
      "loss": 0.1555,
      "step": 1200
    },
    {
      "epoch": 0.5050293378038558,
      "grad_norm": 0.14767815172672272,
      "learning_rate": 9.907795473595977e-05,
      "loss": 0.1546,
      "step": 1205
    },
    {
      "epoch": 0.5071248952221291,
      "grad_norm": 0.1728920042514801,
      "learning_rate": 9.865884325230512e-05,
      "loss": 0.1582,
      "step": 1210
    },
    {
      "epoch": 0.5092204526404024,
      "grad_norm": 0.13907761871814728,
      "learning_rate": 9.823973176865047e-05,
      "loss": 0.154,
      "step": 1215
    },
    {
      "epoch": 0.5113160100586757,
      "grad_norm": 0.14644944667816162,
      "learning_rate": 9.782062028499581e-05,
      "loss": 0.151,
      "step": 1220
    },
    {
      "epoch": 0.5134115674769488,
      "grad_norm": 0.1749754697084427,
      "learning_rate": 9.740150880134116e-05,
      "loss": 0.1558,
      "step": 1225
    },
    {
      "epoch": 0.5155071248952221,
      "grad_norm": 1.7467832565307617,
      "learning_rate": 9.698239731768651e-05,
      "loss": 0.154,
      "step": 1230
    },
    {
      "epoch": 0.5176026823134954,
      "grad_norm": 0.3709283471107483,
      "learning_rate": 9.656328583403186e-05,
      "loss": 0.1648,
      "step": 1235
    },
    {
      "epoch": 0.5196982397317687,
      "grad_norm": 0.28035739064216614,
      "learning_rate": 9.61441743503772e-05,
      "loss": 0.1697,
      "step": 1240
    },
    {
      "epoch": 0.5217937971500419,
      "grad_norm": 0.1671634167432785,
      "learning_rate": 9.572506286672255e-05,
      "loss": 0.1611,
      "step": 1245
    },
    {
      "epoch": 0.5238893545683152,
      "grad_norm": 0.15249969065189362,
      "learning_rate": 9.53059513830679e-05,
      "loss": 0.1557,
      "step": 1250
    },
    {
      "epoch": 0.5259849119865885,
      "grad_norm": 0.14879916608333588,
      "learning_rate": 9.488683989941326e-05,
      "loss": 0.1516,
      "step": 1255
    },
    {
      "epoch": 0.5280804694048616,
      "grad_norm": 2.7410941123962402,
      "learning_rate": 9.44677284157586e-05,
      "loss": 0.1497,
      "step": 1260
    },
    {
      "epoch": 0.5301760268231349,
      "grad_norm": 0.188863143324852,
      "learning_rate": 9.404861693210395e-05,
      "loss": 0.1567,
      "step": 1265
    },
    {
      "epoch": 0.5322715842414082,
      "grad_norm": 0.22812993824481964,
      "learning_rate": 9.36295054484493e-05,
      "loss": 0.1586,
      "step": 1270
    },
    {
      "epoch": 0.5343671416596815,
      "grad_norm": 0.23461438715457916,
      "learning_rate": 9.321039396479464e-05,
      "loss": 0.1593,
      "step": 1275
    },
    {
      "epoch": 0.5364626990779547,
      "grad_norm": 0.23611848056316376,
      "learning_rate": 9.279128248113999e-05,
      "loss": 0.1575,
      "step": 1280
    },
    {
      "epoch": 0.538558256496228,
      "grad_norm": 0.20142588019371033,
      "learning_rate": 9.237217099748533e-05,
      "loss": 0.1611,
      "step": 1285
    },
    {
      "epoch": 0.5406538139145013,
      "grad_norm": 0.18650312721729279,
      "learning_rate": 9.195305951383069e-05,
      "loss": 0.1585,
      "step": 1290
    },
    {
      "epoch": 0.5427493713327746,
      "grad_norm": 0.1717916578054428,
      "learning_rate": 9.153394803017603e-05,
      "loss": 0.1566,
      "step": 1295
    },
    {
      "epoch": 0.5448449287510477,
      "grad_norm": 0.154686838388443,
      "learning_rate": 9.111483654652138e-05,
      "loss": 0.154,
      "step": 1300
    },
    {
      "epoch": 0.546940486169321,
      "grad_norm": 0.17387616634368896,
      "learning_rate": 9.069572506286673e-05,
      "loss": 0.1574,
      "step": 1305
    },
    {
      "epoch": 0.5490360435875943,
      "grad_norm": 0.1707635372877121,
      "learning_rate": 9.027661357921207e-05,
      "loss": 0.1595,
      "step": 1310
    },
    {
      "epoch": 0.5511316010058676,
      "grad_norm": 0.1505287140607834,
      "learning_rate": 8.985750209555742e-05,
      "loss": 0.1572,
      "step": 1315
    },
    {
      "epoch": 0.5532271584241408,
      "grad_norm": 0.17044112086296082,
      "learning_rate": 8.943839061190276e-05,
      "loss": 0.1526,
      "step": 1320
    },
    {
      "epoch": 0.5553227158424141,
      "grad_norm": 0.20292381942272186,
      "learning_rate": 8.901927912824813e-05,
      "loss": 0.157,
      "step": 1325
    },
    {
      "epoch": 0.5574182732606874,
      "grad_norm": 0.22127684950828552,
      "learning_rate": 8.860016764459346e-05,
      "loss": 0.1569,
      "step": 1330
    },
    {
      "epoch": 0.5595138306789607,
      "grad_norm": 0.169328510761261,
      "learning_rate": 8.818105616093882e-05,
      "loss": 0.1574,
      "step": 1335
    },
    {
      "epoch": 0.5616093880972338,
      "grad_norm": 0.1888064444065094,
      "learning_rate": 8.776194467728415e-05,
      "loss": 0.152,
      "step": 1340
    },
    {
      "epoch": 0.5637049455155071,
      "grad_norm": 2.7211835384368896,
      "learning_rate": 8.734283319362952e-05,
      "loss": 0.1557,
      "step": 1345
    },
    {
      "epoch": 0.5658005029337804,
      "grad_norm": 0.18435344099998474,
      "learning_rate": 8.692372170997486e-05,
      "loss": 0.1527,
      "step": 1350
    },
    {
      "epoch": 0.5678960603520536,
      "grad_norm": 0.2031932920217514,
      "learning_rate": 8.650461022632021e-05,
      "loss": 0.1573,
      "step": 1355
    },
    {
      "epoch": 0.5699916177703269,
      "grad_norm": 0.1941346377134323,
      "learning_rate": 8.608549874266555e-05,
      "loss": 0.1538,
      "step": 1360
    },
    {
      "epoch": 0.5720871751886002,
      "grad_norm": 0.18645240366458893,
      "learning_rate": 8.56663872590109e-05,
      "loss": 0.1542,
      "step": 1365
    },
    {
      "epoch": 0.5741827326068735,
      "grad_norm": 0.1776381880044937,
      "learning_rate": 8.524727577535625e-05,
      "loss": 0.1567,
      "step": 1370
    },
    {
      "epoch": 0.5762782900251467,
      "grad_norm": 0.17730367183685303,
      "learning_rate": 8.482816429170159e-05,
      "loss": 0.1552,
      "step": 1375
    },
    {
      "epoch": 0.5783738474434199,
      "grad_norm": 0.1614091843366623,
      "learning_rate": 8.440905280804694e-05,
      "loss": 0.1545,
      "step": 1380
    },
    {
      "epoch": 0.5804694048616932,
      "grad_norm": 0.2010612040758133,
      "learning_rate": 8.398994132439229e-05,
      "loss": 0.1525,
      "step": 1385
    },
    {
      "epoch": 0.5825649622799665,
      "grad_norm": 0.29623404145240784,
      "learning_rate": 8.357082984073764e-05,
      "loss": 0.1632,
      "step": 1390
    },
    {
      "epoch": 0.5846605196982397,
      "grad_norm": 0.26536107063293457,
      "learning_rate": 8.315171835708298e-05,
      "loss": 0.161,
      "step": 1395
    },
    {
      "epoch": 0.586756077116513,
      "grad_norm": 0.21128953993320465,
      "learning_rate": 8.273260687342833e-05,
      "loss": 0.1585,
      "step": 1400
    },
    {
      "epoch": 0.5888516345347863,
      "grad_norm": 3.6902313232421875,
      "learning_rate": 8.231349538977369e-05,
      "loss": 0.153,
      "step": 1405
    },
    {
      "epoch": 0.5909471919530596,
      "grad_norm": 0.18794021010398865,
      "learning_rate": 8.189438390611904e-05,
      "loss": 0.1585,
      "step": 1410
    },
    {
      "epoch": 0.5930427493713327,
      "grad_norm": 0.17947795987129211,
      "learning_rate": 8.147527242246438e-05,
      "loss": 0.1584,
      "step": 1415
    },
    {
      "epoch": 0.595138306789606,
      "grad_norm": 0.15674275159835815,
      "learning_rate": 8.105616093880973e-05,
      "loss": 0.1547,
      "step": 1420
    },
    {
      "epoch": 0.5972338642078793,
      "grad_norm": 0.15368077158927917,
      "learning_rate": 8.063704945515508e-05,
      "loss": 0.154,
      "step": 1425
    },
    {
      "epoch": 0.5993294216261525,
      "grad_norm": 0.1675073206424713,
      "learning_rate": 8.021793797150042e-05,
      "loss": 0.1519,
      "step": 1430
    },
    {
      "epoch": 0.6014249790444258,
      "grad_norm": 0.15424901247024536,
      "learning_rate": 7.979882648784577e-05,
      "loss": 0.1532,
      "step": 1435
    },
    {
      "epoch": 0.6035205364626991,
      "grad_norm": 1.752557396888733,
      "learning_rate": 7.937971500419112e-05,
      "loss": 0.1521,
      "step": 1440
    },
    {
      "epoch": 0.6056160938809724,
      "grad_norm": 0.179281547665596,
      "learning_rate": 7.896060352053647e-05,
      "loss": 0.1568,
      "step": 1445
    },
    {
      "epoch": 0.6077116512992456,
      "grad_norm": 0.18618442118167877,
      "learning_rate": 7.854149203688181e-05,
      "loss": 0.1555,
      "step": 1450
    },
    {
      "epoch": 0.6098072087175188,
      "grad_norm": 0.1767512410879135,
      "learning_rate": 7.812238055322716e-05,
      "loss": 0.156,
      "step": 1455
    },
    {
      "epoch": 0.6119027661357921,
      "grad_norm": 0.1729685217142105,
      "learning_rate": 7.770326906957252e-05,
      "loss": 0.1521,
      "step": 1460
    },
    {
      "epoch": 0.6139983235540654,
      "grad_norm": 0.16246432065963745,
      "learning_rate": 7.728415758591785e-05,
      "loss": 0.1541,
      "step": 1465
    },
    {
      "epoch": 0.6160938809723386,
      "grad_norm": 0.15939489006996155,
      "learning_rate": 7.68650461022632e-05,
      "loss": 0.1551,
      "step": 1470
    },
    {
      "epoch": 0.6181894383906119,
      "grad_norm": 0.1684809774160385,
      "learning_rate": 7.644593461860854e-05,
      "loss": 0.1542,
      "step": 1475
    },
    {
      "epoch": 0.6202849958088852,
      "grad_norm": 0.18968342244625092,
      "learning_rate": 7.602682313495391e-05,
      "loss": 0.1537,
      "step": 1480
    },
    {
      "epoch": 0.6223805532271585,
      "grad_norm": 0.20144881308078766,
      "learning_rate": 7.560771165129925e-05,
      "loss": 0.158,
      "step": 1485
    },
    {
      "epoch": 0.6244761106454317,
      "grad_norm": 0.1785353720188141,
      "learning_rate": 7.51886001676446e-05,
      "loss": 0.1537,
      "step": 1490
    },
    {
      "epoch": 0.6265716680637049,
      "grad_norm": 3.9302639961242676,
      "learning_rate": 7.476948868398994e-05,
      "loss": 0.1539,
      "step": 1495
    },
    {
      "epoch": 0.6286672254819782,
      "grad_norm": 0.17349500954151154,
      "learning_rate": 7.43503772003353e-05,
      "loss": 0.1549,
      "step": 1500
    },
    {
      "epoch": 0.6307627829002515,
      "grad_norm": 0.181074857711792,
      "learning_rate": 7.393126571668064e-05,
      "loss": 0.1523,
      "step": 1505
    },
    {
      "epoch": 0.6328583403185247,
      "grad_norm": 0.1837494969367981,
      "learning_rate": 7.351215423302599e-05,
      "loss": 0.1543,
      "step": 1510
    },
    {
      "epoch": 0.634953897736798,
      "grad_norm": 0.17972221970558167,
      "learning_rate": 7.309304274937133e-05,
      "loss": 0.1539,
      "step": 1515
    },
    {
      "epoch": 0.6370494551550713,
      "grad_norm": 0.18084241449832916,
      "learning_rate": 7.267393126571668e-05,
      "loss": 0.1504,
      "step": 1520
    },
    {
      "epoch": 0.6391450125733446,
      "grad_norm": 0.16487205028533936,
      "learning_rate": 7.225481978206203e-05,
      "loss": 0.1547,
      "step": 1525
    },
    {
      "epoch": 0.6412405699916177,
      "grad_norm": 3.8815670013427734,
      "learning_rate": 7.183570829840737e-05,
      "loss": 0.1481,
      "step": 1530
    },
    {
      "epoch": 0.643336127409891,
      "grad_norm": 0.18625426292419434,
      "learning_rate": 7.141659681475272e-05,
      "loss": 0.1551,
      "step": 1535
    },
    {
      "epoch": 0.6454316848281643,
      "grad_norm": 0.2006104439496994,
      "learning_rate": 7.099748533109808e-05,
      "loss": 0.1567,
      "step": 1540
    },
    {
      "epoch": 0.6475272422464375,
      "grad_norm": 0.20722784101963043,
      "learning_rate": 7.057837384744343e-05,
      "loss": 0.1599,
      "step": 1545
    },
    {
      "epoch": 0.6496227996647108,
      "grad_norm": 0.18843336403369904,
      "learning_rate": 7.015926236378877e-05,
      "loss": 0.1568,
      "step": 1550
    },
    {
      "epoch": 0.6517183570829841,
      "grad_norm": 0.16453127562999725,
      "learning_rate": 6.974015088013412e-05,
      "loss": 0.1564,
      "step": 1555
    },
    {
      "epoch": 0.6538139145012574,
      "grad_norm": 0.1718008667230606,
      "learning_rate": 6.932103939647947e-05,
      "loss": 0.1507,
      "step": 1560
    },
    {
      "epoch": 0.6559094719195306,
      "grad_norm": 1.9749072790145874,
      "learning_rate": 6.890192791282482e-05,
      "loss": 0.1546,
      "step": 1565
    },
    {
      "epoch": 0.6580050293378038,
      "grad_norm": 0.846435546875,
      "learning_rate": 6.848281642917016e-05,
      "loss": 0.1553,
      "step": 1570
    },
    {
      "epoch": 0.6601005867560771,
      "grad_norm": 0.21918132901191711,
      "learning_rate": 6.806370494551551e-05,
      "loss": 0.1544,
      "step": 1575
    },
    {
      "epoch": 0.6621961441743504,
      "grad_norm": 0.20493735373020172,
      "learning_rate": 6.764459346186086e-05,
      "loss": 0.1588,
      "step": 1580
    },
    {
      "epoch": 0.6642917015926236,
      "grad_norm": 0.16465319693088531,
      "learning_rate": 6.72254819782062e-05,
      "loss": 0.1556,
      "step": 1585
    },
    {
      "epoch": 0.6663872590108969,
      "grad_norm": 0.1511167287826538,
      "learning_rate": 6.680637049455155e-05,
      "loss": 0.1551,
      "step": 1590
    },
    {
      "epoch": 0.6684828164291702,
      "grad_norm": 0.15197286009788513,
      "learning_rate": 6.63872590108969e-05,
      "loss": 0.1513,
      "step": 1595
    },
    {
      "epoch": 0.6705783738474435,
      "grad_norm": 0.14843755960464478,
      "learning_rate": 6.596814752724226e-05,
      "loss": 0.1476,
      "step": 1600
    },
    {
      "epoch": 0.6726739312657167,
      "grad_norm": 0.14049085974693298,
      "learning_rate": 6.55490360435876e-05,
      "loss": 0.1507,
      "step": 1605
    },
    {
      "epoch": 0.6747694886839899,
      "grad_norm": 0.1492355465888977,
      "learning_rate": 6.512992455993295e-05,
      "loss": 0.1499,
      "step": 1610
    },
    {
      "epoch": 0.6768650461022632,
      "grad_norm": 0.15042835474014282,
      "learning_rate": 6.47108130762783e-05,
      "loss": 0.15,
      "step": 1615
    },
    {
      "epoch": 0.6789606035205364,
      "grad_norm": 0.14849700033664703,
      "learning_rate": 6.429170159262365e-05,
      "loss": 0.152,
      "step": 1620
    },
    {
      "epoch": 0.6810561609388097,
      "grad_norm": 0.15401747822761536,
      "learning_rate": 6.387259010896899e-05,
      "loss": 0.1507,
      "step": 1625
    },
    {
      "epoch": 0.683151718357083,
      "grad_norm": 0.13168840110301971,
      "learning_rate": 6.345347862531434e-05,
      "loss": 0.153,
      "step": 1630
    },
    {
      "epoch": 0.6852472757753563,
      "grad_norm": 0.1513977199792862,
      "learning_rate": 6.303436714165969e-05,
      "loss": 0.1516,
      "step": 1635
    },
    {
      "epoch": 0.6873428331936295,
      "grad_norm": 0.14483259618282318,
      "learning_rate": 6.261525565800503e-05,
      "loss": 0.15,
      "step": 1640
    },
    {
      "epoch": 0.6894383906119028,
      "grad_norm": 0.15542860329151154,
      "learning_rate": 6.219614417435038e-05,
      "loss": 0.1493,
      "step": 1645
    },
    {
      "epoch": 0.691533948030176,
      "grad_norm": 0.14083847403526306,
      "learning_rate": 6.177703269069572e-05,
      "loss": 0.1511,
      "step": 1650
    },
    {
      "epoch": 0.6936295054484493,
      "grad_norm": 0.14129005372524261,
      "learning_rate": 6.135792120704108e-05,
      "loss": 0.1525,
      "step": 1655
    },
    {
      "epoch": 0.6957250628667225,
      "grad_norm": 0.14137649536132812,
      "learning_rate": 6.093880972338642e-05,
      "loss": 0.1486,
      "step": 1660
    },
    {
      "epoch": 0.6978206202849958,
      "grad_norm": 0.163183331489563,
      "learning_rate": 6.051969823973177e-05,
      "loss": 0.1526,
      "step": 1665
    },
    {
      "epoch": 0.6999161777032691,
      "grad_norm": 0.15466581284999847,
      "learning_rate": 6.010058675607711e-05,
      "loss": 0.1523,
      "step": 1670
    },
    {
      "epoch": 0.7020117351215424,
      "grad_norm": 0.1582300364971161,
      "learning_rate": 5.968147527242247e-05,
      "loss": 0.1501,
      "step": 1675
    },
    {
      "epoch": 0.7041072925398156,
      "grad_norm": 0.15225544571876526,
      "learning_rate": 5.9262363788767817e-05,
      "loss": 0.1529,
      "step": 1680
    },
    {
      "epoch": 0.7062028499580888,
      "grad_norm": 0.1497444212436676,
      "learning_rate": 5.884325230511316e-05,
| "loss": 0.1504, | |
| "step": 1685 | |
| }, | |
| { | |
| "epoch": 0.7082984073763621, | |
| "grad_norm": 0.15386608242988586, | |
| "learning_rate": 5.8424140821458507e-05, | |
| "loss": 0.1507, | |
| "step": 1690 | |
| }, | |
| { | |
| "epoch": 0.7103939647946353, | |
| "grad_norm": 0.16286878287792206, | |
| "learning_rate": 5.8005029337803865e-05, | |
| "loss": 0.1483, | |
| "step": 1695 | |
| }, | |
| { | |
| "epoch": 0.7124895222129086, | |
| "grad_norm": 0.16049712896347046, | |
| "learning_rate": 5.758591785414921e-05, | |
| "loss": 0.1511, | |
| "step": 1700 | |
| }, | |
| { | |
| "epoch": 0.7145850796311819, | |
| "grad_norm": 0.16950005292892456, | |
| "learning_rate": 5.7166806370494555e-05, | |
| "loss": 0.1497, | |
| "step": 1705 | |
| }, | |
| { | |
| "epoch": 0.7166806370494552, | |
| "grad_norm": 0.14905424416065216, | |
| "learning_rate": 5.67476948868399e-05, | |
| "loss": 0.1518, | |
| "step": 1710 | |
| }, | |
| { | |
| "epoch": 0.7187761944677284, | |
| "grad_norm": 0.16250193119049072, | |
| "learning_rate": 5.632858340318525e-05, | |
| "loss": 0.1518, | |
| "step": 1715 | |
| }, | |
| { | |
| "epoch": 0.7208717518860017, | |
| "grad_norm": 0.15719325840473175, | |
| "learning_rate": 5.59094719195306e-05, | |
| "loss": 0.1509, | |
| "step": 1720 | |
| }, | |
| { | |
| "epoch": 0.7229673093042749, | |
| "grad_norm": 0.13987241685390472, | |
| "learning_rate": 5.549036043587594e-05, | |
| "loss": 0.1493, | |
| "step": 1725 | |
| }, | |
| { | |
| "epoch": 0.7250628667225482, | |
| "grad_norm": 0.15766645967960358, | |
| "learning_rate": 5.507124895222129e-05, | |
| "loss": 0.151, | |
| "step": 1730 | |
| }, | |
| { | |
| "epoch": 0.7271584241408214, | |
| "grad_norm": 0.1426958292722702, | |
| "learning_rate": 5.4652137468566645e-05, | |
| "loss": 0.1477, | |
| "step": 1735 | |
| }, | |
| { | |
| "epoch": 0.7292539815590947, | |
| "grad_norm": 0.15173014998435974, | |
| "learning_rate": 5.423302598491199e-05, | |
| "loss": 0.1498, | |
| "step": 1740 | |
| }, | |
| { | |
| "epoch": 0.731349538977368, | |
| "grad_norm": 0.13781285285949707, | |
| "learning_rate": 5.3813914501257335e-05, | |
| "loss": 0.1461, | |
| "step": 1745 | |
| }, | |
| { | |
| "epoch": 0.7334450963956413, | |
| "grad_norm": 0.15598557889461517, | |
| "learning_rate": 5.339480301760269e-05, | |
| "loss": 0.1468, | |
| "step": 1750 | |
| }, | |
| { | |
| "epoch": 0.7355406538139145, | |
| "grad_norm": 0.1487160325050354, | |
| "learning_rate": 5.297569153394803e-05, | |
| "loss": 0.1515, | |
| "step": 1755 | |
| }, | |
| { | |
| "epoch": 0.7376362112321878, | |
| "grad_norm": 0.1766008883714676, | |
| "learning_rate": 5.255658005029338e-05, | |
| "loss": 0.1496, | |
| "step": 1760 | |
| }, | |
| { | |
| "epoch": 0.739731768650461, | |
| "grad_norm": 0.1634136140346527, | |
| "learning_rate": 5.213746856663872e-05, | |
| "loss": 0.152, | |
| "step": 1765 | |
| }, | |
| { | |
| "epoch": 0.7418273260687342, | |
| "grad_norm": 0.16231022775173187, | |
| "learning_rate": 5.171835708298408e-05, | |
| "loss": 0.1511, | |
| "step": 1770 | |
| }, | |
| { | |
| "epoch": 0.7439228834870075, | |
| "grad_norm": 0.1736147552728653, | |
| "learning_rate": 5.1299245599329425e-05, | |
| "loss": 0.1519, | |
| "step": 1775 | |
| }, | |
| { | |
| "epoch": 0.7460184409052808, | |
| "grad_norm": 0.15696606040000916, | |
| "learning_rate": 5.088013411567477e-05, | |
| "loss": 0.1522, | |
| "step": 1780 | |
| }, | |
| { | |
| "epoch": 0.7481139983235541, | |
| "grad_norm": 0.16596518456935883, | |
| "learning_rate": 5.0461022632020115e-05, | |
| "loss": 0.1494, | |
| "step": 1785 | |
| }, | |
| { | |
| "epoch": 0.7502095557418273, | |
| "grad_norm": 0.16456949710845947, | |
| "learning_rate": 5.0041911148365474e-05, | |
| "loss": 0.1485, | |
| "step": 1790 | |
| }, | |
| { | |
| "epoch": 0.7523051131601006, | |
| "grad_norm": 0.16951188445091248, | |
| "learning_rate": 4.962279966471082e-05, | |
| "loss": 0.1536, | |
| "step": 1795 | |
| }, | |
| { | |
| "epoch": 0.7544006705783739, | |
| "grad_norm": 0.17507892847061157, | |
| "learning_rate": 4.9203688181056164e-05, | |
| "loss": 0.1524, | |
| "step": 1800 | |
| }, | |
| { | |
| "epoch": 0.7564962279966471, | |
| "grad_norm": 0.1613740175962448, | |
| "learning_rate": 4.8784576697401516e-05, | |
| "loss": 0.151, | |
| "step": 1805 | |
| }, | |
| { | |
| "epoch": 0.7585917854149203, | |
| "grad_norm": 0.1735554188489914, | |
| "learning_rate": 4.836546521374686e-05, | |
| "loss": 0.149, | |
| "step": 1810 | |
| }, | |
| { | |
| "epoch": 0.7606873428331936, | |
| "grad_norm": 0.15241263806819916, | |
| "learning_rate": 4.7946353730092206e-05, | |
| "loss": 0.1475, | |
| "step": 1815 | |
| }, | |
| { | |
| "epoch": 0.7627829002514669, | |
| "grad_norm": 0.15598662197589874, | |
| "learning_rate": 4.752724224643755e-05, | |
| "loss": 0.1504, | |
| "step": 1820 | |
| }, | |
| { | |
| "epoch": 0.7648784576697402, | |
| "grad_norm": 0.1638031303882599, | |
| "learning_rate": 4.71081307627829e-05, | |
| "loss": 0.1483, | |
| "step": 1825 | |
| }, | |
| { | |
| "epoch": 0.7669740150880134, | |
| "grad_norm": 0.15647321939468384, | |
| "learning_rate": 4.668901927912825e-05, | |
| "loss": 0.1487, | |
| "step": 1830 | |
| }, | |
| { | |
| "epoch": 0.7690695725062867, | |
| "grad_norm": 0.15045635402202606, | |
| "learning_rate": 4.62699077954736e-05, | |
| "loss": 0.1489, | |
| "step": 1835 | |
| }, | |
| { | |
| "epoch": 0.7711651299245599, | |
| "grad_norm": 0.1481800377368927, | |
| "learning_rate": 4.5850796311818944e-05, | |
| "loss": 0.1498, | |
| "step": 1840 | |
| }, | |
| { | |
| "epoch": 0.7732606873428332, | |
| "grad_norm": 0.16117189824581146, | |
| "learning_rate": 4.5431684828164296e-05, | |
| "loss": 0.147, | |
| "step": 1845 | |
| }, | |
| { | |
| "epoch": 0.7753562447611064, | |
| "grad_norm": 0.15690650045871735, | |
| "learning_rate": 4.501257334450964e-05, | |
| "loss": 0.152, | |
| "step": 1850 | |
| }, | |
| { | |
| "epoch": 0.7774518021793797, | |
| "grad_norm": 0.15324068069458008, | |
| "learning_rate": 4.459346186085499e-05, | |
| "loss": 0.1509, | |
| "step": 1855 | |
| }, | |
| { | |
| "epoch": 0.779547359597653, | |
| "grad_norm": 0.1670048087835312, | |
| "learning_rate": 4.417435037720034e-05, | |
| "loss": 0.1481, | |
| "step": 1860 | |
| }, | |
| { | |
| "epoch": 0.7816429170159263, | |
| "grad_norm": 0.16738423705101013, | |
| "learning_rate": 4.375523889354568e-05, | |
| "loss": 0.1525, | |
| "step": 1865 | |
| }, | |
| { | |
| "epoch": 0.7837384744341995, | |
| "grad_norm": 0.15926587581634521, | |
| "learning_rate": 4.333612740989103e-05, | |
| "loss": 0.1499, | |
| "step": 1870 | |
| }, | |
| { | |
| "epoch": 0.7858340318524728, | |
| "grad_norm": 0.15555323660373688, | |
| "learning_rate": 4.291701592623638e-05, | |
| "loss": 0.1487, | |
| "step": 1875 | |
| }, | |
| { | |
| "epoch": 0.787929589270746, | |
| "grad_norm": 0.14897003769874573, | |
| "learning_rate": 4.2497904442581724e-05, | |
| "loss": 0.153, | |
| "step": 1880 | |
| }, | |
| { | |
| "epoch": 0.7900251466890192, | |
| "grad_norm": 0.16712865233421326, | |
| "learning_rate": 4.2078792958927076e-05, | |
| "loss": 0.1522, | |
| "step": 1885 | |
| }, | |
| { | |
| "epoch": 0.7921207041072925, | |
| "grad_norm": 0.16521087288856506, | |
| "learning_rate": 4.165968147527242e-05, | |
| "loss": 0.1554, | |
| "step": 1890 | |
| }, | |
| { | |
| "epoch": 0.7942162615255658, | |
| "grad_norm": 0.15836192667484283, | |
| "learning_rate": 4.124056999161777e-05, | |
| "loss": 0.1515, | |
| "step": 1895 | |
| }, | |
| { | |
| "epoch": 0.7963118189438391, | |
| "grad_norm": 0.1507478654384613, | |
| "learning_rate": 4.0821458507963125e-05, | |
| "loss": 0.1525, | |
| "step": 1900 | |
| }, | |
| { | |
| "epoch": 0.7984073763621123, | |
| "grad_norm": 0.6994775533676147, | |
| "learning_rate": 4.040234702430847e-05, | |
| "loss": 0.1522, | |
| "step": 1905 | |
| }, | |
| { | |
| "epoch": 0.8005029337803856, | |
| "grad_norm": 0.1698751598596573, | |
| "learning_rate": 3.998323554065382e-05, | |
| "loss": 0.152, | |
| "step": 1910 | |
| }, | |
| { | |
| "epoch": 0.8025984911986589, | |
| "grad_norm": 0.16350072622299194, | |
| "learning_rate": 3.9564124056999166e-05, | |
| "loss": 0.1499, | |
| "step": 1915 | |
| }, | |
| { | |
| "epoch": 0.8046940486169321, | |
| "grad_norm": 0.15547800064086914, | |
| "learning_rate": 3.914501257334451e-05, | |
| "loss": 0.1496, | |
| "step": 1920 | |
| }, | |
| { | |
| "epoch": 0.8067896060352053, | |
| "grad_norm": 0.1584520936012268, | |
| "learning_rate": 3.8725901089689856e-05, | |
| "loss": 0.1521, | |
| "step": 1925 | |
| }, | |
| { | |
| "epoch": 0.8088851634534786, | |
| "grad_norm": 0.15392720699310303, | |
| "learning_rate": 3.830678960603521e-05, | |
| "loss": 0.1513, | |
| "step": 1930 | |
| }, | |
| { | |
| "epoch": 0.8109807208717519, | |
| "grad_norm": 0.15251478552818298, | |
| "learning_rate": 3.788767812238055e-05, | |
| "loss": 0.1503, | |
| "step": 1935 | |
| }, | |
| { | |
| "epoch": 0.8130762782900252, | |
| "grad_norm": 0.1555824875831604, | |
| "learning_rate": 3.7468566638725905e-05, | |
| "loss": 0.148, | |
| "step": 1940 | |
| }, | |
| { | |
| "epoch": 0.8151718357082984, | |
| "grad_norm": 1.0603052377700806, | |
| "learning_rate": 3.704945515507125e-05, | |
| "loss": 0.1511, | |
| "step": 1945 | |
| }, | |
| { | |
| "epoch": 0.8172673931265717, | |
| "grad_norm": 0.21994204819202423, | |
| "learning_rate": 3.66303436714166e-05, | |
| "loss": 0.1536, | |
| "step": 1950 | |
| }, | |
| { | |
| "epoch": 0.819362950544845, | |
| "grad_norm": 0.2238265573978424, | |
| "learning_rate": 3.6211232187761947e-05, | |
| "loss": 0.1549, | |
| "step": 1955 | |
| }, | |
| { | |
| "epoch": 0.8214585079631181, | |
| "grad_norm": 0.212602898478508, | |
| "learning_rate": 3.57921207041073e-05, | |
| "loss": 0.1567, | |
| "step": 1960 | |
| }, | |
| { | |
| "epoch": 0.8235540653813914, | |
| "grad_norm": 0.18257835507392883, | |
| "learning_rate": 3.537300922045264e-05, | |
| "loss": 0.1513, | |
| "step": 1965 | |
| }, | |
| { | |
| "epoch": 0.8256496227996647, | |
| "grad_norm": 0.17607073485851288, | |
| "learning_rate": 3.495389773679799e-05, | |
| "loss": 0.1539, | |
| "step": 1970 | |
| }, | |
| { | |
| "epoch": 0.827745180217938, | |
| "grad_norm": 0.14804469048976898, | |
| "learning_rate": 3.453478625314333e-05, | |
| "loss": 0.1502, | |
| "step": 1975 | |
| }, | |
| { | |
| "epoch": 0.8298407376362112, | |
| "grad_norm": 0.15939727425575256, | |
| "learning_rate": 3.4115674769488685e-05, | |
| "loss": 0.1482, | |
| "step": 1980 | |
| }, | |
| { | |
| "epoch": 0.8319362950544845, | |
| "grad_norm": 0.16081516444683075, | |
| "learning_rate": 3.369656328583403e-05, | |
| "loss": 0.152, | |
| "step": 1985 | |
| }, | |
| { | |
| "epoch": 0.8340318524727578, | |
| "grad_norm": 0.7829539179801941, | |
| "learning_rate": 3.327745180217938e-05, | |
| "loss": 0.1497, | |
| "step": 1990 | |
| }, | |
| { | |
| "epoch": 0.836127409891031, | |
| "grad_norm": 0.15778230130672455, | |
| "learning_rate": 3.285834031852473e-05, | |
| "loss": 0.1485, | |
| "step": 1995 | |
| }, | |
| { | |
| "epoch": 0.8382229673093042, | |
| "grad_norm": 0.16156698763370514, | |
| "learning_rate": 3.243922883487008e-05, | |
| "loss": 0.1508, | |
| "step": 2000 | |
| }, | |
| { | |
| "epoch": 0.8403185247275775, | |
| "grad_norm": 0.15631239116191864, | |
| "learning_rate": 3.2020117351215424e-05, | |
| "loss": 0.1484, | |
| "step": 2005 | |
| }, | |
| { | |
| "epoch": 0.8424140821458508, | |
| "grad_norm": 0.1522732824087143, | |
| "learning_rate": 3.1601005867560775e-05, | |
| "loss": 0.1508, | |
| "step": 2010 | |
| }, | |
| { | |
| "epoch": 0.8445096395641241, | |
| "grad_norm": 0.1581004112958908, | |
| "learning_rate": 3.118189438390612e-05, | |
| "loss": 0.149, | |
| "step": 2015 | |
| }, | |
| { | |
| "epoch": 0.8466051969823973, | |
| "grad_norm": 0.1513088047504425, | |
| "learning_rate": 3.076278290025147e-05, | |
| "loss": 0.15, | |
| "step": 2020 | |
| }, | |
| { | |
| "epoch": 0.8487007544006706, | |
| "grad_norm": 0.1780407577753067, | |
| "learning_rate": 3.0343671416596814e-05, | |
| "loss": 0.1494, | |
| "step": 2025 | |
| }, | |
| { | |
| "epoch": 0.8507963118189439, | |
| "grad_norm": 0.16309364140033722, | |
| "learning_rate": 2.9924559932942165e-05, | |
| "loss": 0.1474, | |
| "step": 2030 | |
| }, | |
| { | |
| "epoch": 0.852891869237217, | |
| "grad_norm": 0.15739715099334717, | |
| "learning_rate": 2.950544844928751e-05, | |
| "loss": 0.1496, | |
| "step": 2035 | |
| }, | |
| { | |
| "epoch": 0.8549874266554903, | |
| "grad_norm": 0.1566157042980194, | |
| "learning_rate": 2.908633696563286e-05, | |
| "loss": 0.1482, | |
| "step": 2040 | |
| }, | |
| { | |
| "epoch": 0.8570829840737636, | |
| "grad_norm": 0.14910633862018585, | |
| "learning_rate": 2.8667225481978204e-05, | |
| "loss": 0.1493, | |
| "step": 2045 | |
| }, | |
| { | |
| "epoch": 0.8591785414920369, | |
| "grad_norm": 0.15060973167419434, | |
| "learning_rate": 2.8248113998323556e-05, | |
| "loss": 0.1462, | |
| "step": 2050 | |
| }, | |
| { | |
| "epoch": 0.8612740989103101, | |
| "grad_norm": 0.15438151359558105, | |
| "learning_rate": 2.7829002514668907e-05, | |
| "loss": 0.1509, | |
| "step": 2055 | |
| }, | |
| { | |
| "epoch": 0.8633696563285834, | |
| "grad_norm": 0.15441566705703735, | |
| "learning_rate": 2.7409891031014252e-05, | |
| "loss": 0.1481, | |
| "step": 2060 | |
| }, | |
| { | |
| "epoch": 0.8654652137468567, | |
| "grad_norm": 0.15039560198783875, | |
| "learning_rate": 2.69907795473596e-05, | |
| "loss": 0.1466, | |
| "step": 2065 | |
| }, | |
| { | |
| "epoch": 0.86756077116513, | |
| "grad_norm": 0.14182990789413452, | |
| "learning_rate": 2.6571668063704946e-05, | |
| "loss": 0.1475, | |
| "step": 2070 | |
| }, | |
| { | |
| "epoch": 0.8696563285834031, | |
| "grad_norm": 0.1383397877216339, | |
| "learning_rate": 2.6152556580050297e-05, | |
| "loss": 0.1498, | |
| "step": 2075 | |
| }, | |
| { | |
| "epoch": 0.8717518860016764, | |
| "grad_norm": 0.1353161633014679, | |
| "learning_rate": 2.5733445096395642e-05, | |
| "loss": 0.1485, | |
| "step": 2080 | |
| }, | |
| { | |
| "epoch": 0.8738474434199497, | |
| "grad_norm": 0.14230230450630188, | |
| "learning_rate": 2.5314333612740994e-05, | |
| "loss": 0.1488, | |
| "step": 2085 | |
| }, | |
| { | |
| "epoch": 0.875943000838223, | |
| "grad_norm": 0.14749757945537567, | |
| "learning_rate": 2.4895222129086336e-05, | |
| "loss": 0.1464, | |
| "step": 2090 | |
| }, | |
| { | |
| "epoch": 0.8780385582564962, | |
| "grad_norm": 0.1343354731798172, | |
| "learning_rate": 2.4476110645431684e-05, | |
| "loss": 0.1476, | |
| "step": 2095 | |
| }, | |
| { | |
| "epoch": 0.8801341156747695, | |
| "grad_norm": 0.15284012258052826, | |
| "learning_rate": 2.4056999161777032e-05, | |
| "loss": 0.1499, | |
| "step": 2100 | |
| }, | |
| { | |
| "epoch": 0.8822296730930428, | |
| "grad_norm": 0.14641784131526947, | |
| "learning_rate": 2.363788767812238e-05, | |
| "loss": 0.1473, | |
| "step": 2105 | |
| }, | |
| { | |
| "epoch": 0.8843252305113161, | |
| "grad_norm": 0.13705027103424072, | |
| "learning_rate": 2.321877619446773e-05, | |
| "loss": 0.1464, | |
| "step": 2110 | |
| }, | |
| { | |
| "epoch": 0.8864207879295892, | |
| "grad_norm": 0.15073353052139282, | |
| "learning_rate": 2.2799664710813078e-05, | |
| "loss": 0.1464, | |
| "step": 2115 | |
| }, | |
| { | |
| "epoch": 0.8885163453478625, | |
| "grad_norm": 0.14341919124126434, | |
| "learning_rate": 2.2380553227158423e-05, | |
| "loss": 0.1479, | |
| "step": 2120 | |
| }, | |
| { | |
| "epoch": 0.8906119027661358, | |
| "grad_norm": 0.14984482526779175, | |
| "learning_rate": 2.196144174350377e-05, | |
| "loss": 0.1468, | |
| "step": 2125 | |
| }, | |
| { | |
| "epoch": 0.892707460184409, | |
| "grad_norm": 0.1421850025653839, | |
| "learning_rate": 2.1542330259849123e-05, | |
| "loss": 0.1431, | |
| "step": 2130 | |
| }, | |
| { | |
| "epoch": 0.8948030176026823, | |
| "grad_norm": 0.15779058635234833, | |
| "learning_rate": 2.112321877619447e-05, | |
| "loss": 0.1484, | |
| "step": 2135 | |
| }, | |
| { | |
| "epoch": 0.8968985750209556, | |
| "grad_norm": 0.14414595067501068, | |
| "learning_rate": 2.0704107292539816e-05, | |
| "loss": 0.1477, | |
| "step": 2140 | |
| }, | |
| { | |
| "epoch": 0.8989941324392289, | |
| "grad_norm": 0.15078584849834442, | |
| "learning_rate": 2.0284995808885164e-05, | |
| "loss": 0.1471, | |
| "step": 2145 | |
| }, | |
| { | |
| "epoch": 0.901089689857502, | |
| "grad_norm": 0.6652347445487976, | |
| "learning_rate": 1.9865884325230513e-05, | |
| "loss": 0.146, | |
| "step": 2150 | |
| }, | |
| { | |
| "epoch": 0.9031852472757753, | |
| "grad_norm": 0.1553775519132614, | |
| "learning_rate": 1.944677284157586e-05, | |
| "loss": 0.1455, | |
| "step": 2155 | |
| }, | |
| { | |
| "epoch": 0.9052808046940486, | |
| "grad_norm": 0.14678525924682617, | |
| "learning_rate": 1.902766135792121e-05, | |
| "loss": 0.1469, | |
| "step": 2160 | |
| }, | |
| { | |
| "epoch": 0.9073763621123219, | |
| "grad_norm": 0.1497979462146759, | |
| "learning_rate": 1.8608549874266558e-05, | |
| "loss": 0.1488, | |
| "step": 2165 | |
| }, | |
| { | |
| "epoch": 0.9094719195305951, | |
| "grad_norm": 0.14247293770313263, | |
| "learning_rate": 1.8189438390611903e-05, | |
| "loss": 0.147, | |
| "step": 2170 | |
| }, | |
| { | |
| "epoch": 0.9115674769488684, | |
| "grad_norm": 0.1436048001050949, | |
| "learning_rate": 1.777032690695725e-05, | |
| "loss": 0.148, | |
| "step": 2175 | |
| }, | |
| { | |
| "epoch": 0.9136630343671417, | |
| "grad_norm": 0.1506524235010147, | |
| "learning_rate": 1.73512154233026e-05, | |
| "loss": 0.147, | |
| "step": 2180 | |
| }, | |
| { | |
| "epoch": 0.915758591785415, | |
| "grad_norm": 0.14503729343414307, | |
| "learning_rate": 1.6932103939647948e-05, | |
| "loss": 0.1475, | |
| "step": 2185 | |
| }, | |
| { | |
| "epoch": 0.9178541492036881, | |
| "grad_norm": 0.14085812866687775, | |
| "learning_rate": 1.6512992455993296e-05, | |
| "loss": 0.1468, | |
| "step": 2190 | |
| }, | |
| { | |
| "epoch": 0.9199497066219614, | |
| "grad_norm": 0.13786457479000092, | |
| "learning_rate": 1.609388097233864e-05, | |
| "loss": 0.1449, | |
| "step": 2195 | |
| }, | |
| { | |
| "epoch": 0.9220452640402347, | |
| "grad_norm": 0.14363813400268555, | |
| "learning_rate": 1.567476948868399e-05, | |
| "loss": 0.144, | |
| "step": 2200 | |
| }, | |
| { | |
| "epoch": 0.924140821458508, | |
| "grad_norm": 0.14791929721832275, | |
| "learning_rate": 1.5255658005029338e-05, | |
| "loss": 0.147, | |
| "step": 2205 | |
| }, | |
| { | |
| "epoch": 0.9262363788767812, | |
| "grad_norm": 0.14655451476573944, | |
| "learning_rate": 1.4836546521374687e-05, | |
| "loss": 0.1496, | |
| "step": 2210 | |
| }, | |
| { | |
| "epoch": 0.9283319362950545, | |
| "grad_norm": 0.14993825554847717, | |
| "learning_rate": 1.4417435037720033e-05, | |
| "loss": 0.1465, | |
| "step": 2215 | |
| }, | |
| { | |
| "epoch": 0.9304274937133278, | |
| "grad_norm": 0.1357424557209015, | |
| "learning_rate": 1.3998323554065382e-05, | |
| "loss": 0.1478, | |
| "step": 2220 | |
| }, | |
| { | |
| "epoch": 0.932523051131601, | |
| "grad_norm": 0.15557856857776642, | |
| "learning_rate": 1.357921207041073e-05, | |
| "loss": 0.1429, | |
| "step": 2225 | |
| }, | |
| { | |
| "epoch": 0.9346186085498742, | |
| "grad_norm": 0.16618798673152924, | |
| "learning_rate": 1.3160100586756077e-05, | |
| "loss": 0.1475, | |
| "step": 2230 | |
| }, | |
| { | |
| "epoch": 0.9367141659681475, | |
| "grad_norm": 0.14325110614299774, | |
| "learning_rate": 1.2740989103101425e-05, | |
| "loss": 0.1453, | |
| "step": 2235 | |
| }, | |
| { | |
| "epoch": 0.9388097233864208, | |
| "grad_norm": 0.15656636655330658, | |
| "learning_rate": 1.2321877619446773e-05, | |
| "loss": 0.1463, | |
| "step": 2240 | |
| }, | |
| { | |
| "epoch": 0.940905280804694, | |
| "grad_norm": 0.15591678023338318, | |
| "learning_rate": 1.1902766135792122e-05, | |
| "loss": 0.1475, | |
| "step": 2245 | |
| }, | |
| { | |
| "epoch": 0.9430008382229673, | |
| "grad_norm": 0.14923661947250366, | |
| "learning_rate": 1.148365465213747e-05, | |
| "loss": 0.1474, | |
| "step": 2250 | |
| }, | |
| { | |
| "epoch": 0.9450963956412406, | |
| "grad_norm": 1.0343477725982666, | |
| "learning_rate": 1.1064543168482817e-05, | |
| "loss": 0.1434, | |
| "step": 2255 | |
| }, | |
| { | |
| "epoch": 0.9471919530595139, | |
| "grad_norm": 0.16045677661895752, | |
| "learning_rate": 1.0645431684828165e-05, | |
| "loss": 0.1448, | |
| "step": 2260 | |
| }, | |
| { | |
| "epoch": 0.9492875104777871, | |
| "grad_norm": 0.14702020585536957, | |
| "learning_rate": 1.0226320201173514e-05, | |
| "loss": 0.1468, | |
| "step": 2265 | |
| }, | |
| { | |
| "epoch": 0.9513830678960603, | |
| "grad_norm": 0.14469429850578308, | |
| "learning_rate": 9.80720871751886e-06, | |
| "loss": 0.1434, | |
| "step": 2270 | |
| }, | |
| { | |
| "epoch": 0.9534786253143336, | |
| "grad_norm": 0.1469959169626236, | |
| "learning_rate": 9.388097233864209e-06, | |
| "loss": 0.1487, | |
| "step": 2275 | |
| }, | |
| { | |
| "epoch": 0.9555741827326069, | |
| "grad_norm": 0.13562710583209991, | |
| "learning_rate": 8.968985750209557e-06, | |
| "loss": 0.1477, | |
| "step": 2280 | |
| }, | |
| { | |
| "epoch": 0.9576697401508801, | |
| "grad_norm": 0.15792514383792877, | |
| "learning_rate": 8.549874266554904e-06, | |
| "loss": 0.149, | |
| "step": 2285 | |
| }, | |
| { | |
| "epoch": 0.9597652975691534, | |
| "grad_norm": 0.13964731991291046, | |
| "learning_rate": 8.130762782900252e-06, | |
| "loss": 0.144, | |
| "step": 2290 | |
| }, | |
| { | |
| "epoch": 0.9618608549874267, | |
| "grad_norm": 0.1598869264125824, | |
| "learning_rate": 7.711651299245599e-06, | |
| "loss": 0.1486, | |
| "step": 2295 | |
| }, | |
| { | |
| "epoch": 0.9639564124057, | |
| "grad_norm": 11.353910446166992, | |
| "learning_rate": 7.292539815590947e-06, | |
| "loss": 0.1441, | |
| "step": 2300 | |
| }, | |
| { | |
| "epoch": 0.9660519698239731, | |
| "grad_norm": 0.9498974084854126, | |
| "learning_rate": 6.873428331936295e-06, | |
| "loss": 0.149, | |
| "step": 2305 | |
| }, | |
| { | |
| "epoch": 0.9681475272422464, | |
| "grad_norm": 0.15554401278495789, | |
| "learning_rate": 6.454316848281643e-06, | |
| "loss": 0.1449, | |
| "step": 2310 | |
| }, | |
| { | |
| "epoch": 0.9702430846605197, | |
| "grad_norm": 0.16430126130580902, | |
| "learning_rate": 6.035205364626991e-06, | |
| "loss": 0.1451, | |
| "step": 2315 | |
| }, | |
| { | |
| "epoch": 0.972338642078793, | |
| "grad_norm": 0.15480098128318787, | |
| "learning_rate": 5.616093880972339e-06, | |
| "loss": 0.146, | |
| "step": 2320 | |
| }, | |
| { | |
| "epoch": 0.9744341994970662, | |
| "grad_norm": 0.15997706353664398, | |
| "learning_rate": 5.1969823973176864e-06, | |
| "loss": 0.1483, | |
| "step": 2325 | |
| }, | |
| { | |
| "epoch": 0.9765297569153395, | |
| "grad_norm": 0.15260910987854004, | |
| "learning_rate": 4.777870913663034e-06, | |
| "loss": 0.1472, | |
| "step": 2330 | |
| }, | |
| { | |
| "epoch": 0.9786253143336128, | |
| "grad_norm": 0.15502935647964478, | |
| "learning_rate": 4.358759430008382e-06, | |
| "loss": 0.1456, | |
| "step": 2335 | |
| }, | |
| { | |
| "epoch": 0.980720871751886, | |
| "grad_norm": 0.14989130198955536, | |
| "learning_rate": 3.939647946353731e-06, | |
| "loss": 0.1464, | |
| "step": 2340 | |
| }, | |
| { | |
| "epoch": 0.9828164291701592, | |
| "grad_norm": 0.1601138710975647, | |
| "learning_rate": 3.5205364626990782e-06, | |
| "loss": 0.1479, | |
| "step": 2345 | |
| }, | |
| { | |
| "epoch": 0.9849119865884325, | |
| "grad_norm": 0.15280233323574066, | |
| "learning_rate": 3.101424979044426e-06, | |
| "loss": 0.1459, | |
| "step": 2350 | |
| }, | |
| { | |
| "epoch": 0.9870075440067058, | |
| "grad_norm": 0.15722650289535522, | |
| "learning_rate": 2.6823134953897737e-06, | |
| "loss": 0.1484, | |
| "step": 2355 | |
| }, | |
| { | |
| "epoch": 0.989103101424979, | |
| "grad_norm": 0.15498095750808716, | |
| "learning_rate": 2.2632020117351217e-06, | |
| "loss": 0.1486, | |
| "step": 2360 | |
| }, | |
| { | |
| "epoch": 0.9911986588432523, | |
| "grad_norm": 0.16407634317874908, | |
| "learning_rate": 1.8440905280804694e-06, | |
| "loss": 0.1508, | |
| "step": 2365 | |
| }, | |
| { | |
| "epoch": 0.9932942162615256, | |
| "grad_norm": 0.1465112566947937, | |
| "learning_rate": 1.4249790444258174e-06, | |
| "loss": 0.1482, | |
| "step": 2370 | |
| }, | |
| { | |
| "epoch": 0.9953897736797989, | |
| "grad_norm": 0.1511894315481186, | |
| "learning_rate": 1.005867560771165e-06, | |
| "loss": 0.1458, | |
| "step": 2375 | |
| }, | |
| { | |
| "epoch": 0.9974853310980721, | |
| "grad_norm": 0.15166474878787994, | |
| "learning_rate": 5.86756077116513e-07, | |
| "loss": 0.147, | |
| "step": 2380 | |
| }, | |
| { | |
| "epoch": 0.9995808885163453, | |
| "grad_norm": 1.638846516609192, | |
| "learning_rate": 1.6764459346186086e-07, | |
| "loss": 0.1469, | |
| "step": 2385 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "eval_loss": 0.14722344279289246, | |
| "eval_runtime": 1611.1392, | |
| "eval_samples_per_second": 5.265, | |
| "eval_steps_per_second": 0.659, | |
| "step": 2386 | |
| } | |
| ], | |
| "logging_steps": 5, | |
| "max_steps": 2386, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 1, | |
| "save_steps": 500, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 6.667359824212132e+17, | |
| "train_batch_size": 32, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
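
The table above is the raw `trainer_state.json` that `transformers.Trainer` writes next to a checkpoint. As a minimal illustrative sketch (not part of the log itself), and assuming the JSON has been saved to a local file named `trainer_state.json` — the filename and location are assumptions, not given by the log — it can be summarized with nothing but the standard library:

```python
# Minimal sketch: summarize a Hugging Face Trainer trainer_state.json.
# Assumes the JSON above was saved as "trainer_state.json" in the working
# directory (the path is an assumption, not recorded in the log itself).
import json

with open("trainer_state.json") as f:
    state = json.load(f)

# Training entries carry the key "loss"; the final evaluation entry
# carries "eval_loss" instead, so exact-key membership separates them.
train_logs = [e for e in state["log_history"] if "loss" in e]
eval_logs = [e for e in state["log_history"] if "eval_loss" in e]

print(f"train entries logged: {len(train_logs)} "
      f"(every {state['logging_steps']} steps, {state['max_steps']} steps total)")
print(f"first/last train loss: "
      f"{train_logs[0]['loss']:.4f} -> {train_logs[-1]['loss']:.4f}")

# Isolated grad_norm spikes (e.g. 11.35 at step 2300) stand out
# against the ~0.15 baseline of the later steps.
spikes = [(e["step"], e["grad_norm"]) for e in train_logs if e["grad_norm"] > 1.0]
print(f"grad_norm spikes above 1.0: {len(spikes)}")

if eval_logs:
    last = eval_logs[-1]
    print(f"eval_loss at epoch {last['epoch']}: {last['eval_loss']:.4f}")
```

The train loss plateaus around 0.147 over the last few hundred steps, which lines up with the final `eval_loss` of 0.1472 at epoch 1.0, so this single-epoch run ends without an obvious train/eval gap.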