{
  "best_global_step": 4000,
  "best_metric": 0.7443897795757539,
  "best_model_checkpoint": "/shared/sutd/tej/Finegrained_PRM/models/Qwen2_5_Math_7b_instruct_more_data_run1/checkpoint-4000",
  "epoch": 0.8958880139982502,
  "eval_steps": 1000,
  "global_step": 4000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0, "eval_loss": 12.598268508911133, "eval_runtime": 512.8513, "eval_samples_per_second": 234.602, "eval_steps_per_second": 14.663, "eval_token_accuracy": 0.6931271686857003, "step": 0 },
    { "epoch": 0.0022397200349956255, "grad_norm": 74.01309967041016, "learning_rate": 5.03919372900336e-08, "loss": 12.6044, "step": 10 },
    { "epoch": 0.004479440069991251, "grad_norm": 69.36959838867188, "learning_rate": 1.0638297872340426e-07, "loss": 12.6075, "step": 20 },
    { "epoch": 0.006719160104986877, "grad_norm": 79.40869140625, "learning_rate": 1.6237402015677493e-07, "loss": 12.5963, "step": 30 },
    { "epoch": 0.008958880139982502, "grad_norm": 67.53589630126953, "learning_rate": 2.1836506159014558e-07, "loss": 12.5561, "step": 40 },
    { "epoch": 0.011198600174978127, "grad_norm": 77.55623626708984, "learning_rate": 2.7435610302351626e-07, "loss": 12.4879, "step": 50 },
    { "epoch": 0.013438320209973754, "grad_norm": 57.3841552734375, "learning_rate": 3.303471444568869e-07, "loss": 12.3546, "step": 60 },
    { "epoch": 0.01567804024496938, "grad_norm": 56.3227424621582, "learning_rate": 3.863381858902576e-07, "loss": 12.2106, "step": 70 },
    { "epoch": 0.017917760279965004, "grad_norm": 85.61465454101562, "learning_rate": 4.423292273236283e-07, "loss": 11.8673, "step": 80 },
    { "epoch": 0.02015748031496063, "grad_norm": 89.71321868896484, "learning_rate": 4.98320268756999e-07, "loss": 11.414, "step": 90 },
    { "epoch": 0.022397200349956254, "grad_norm": 133.00584411621094, "learning_rate": 5.543113101903696e-07, "loss": 10.6907, "step": 100 },
    { "epoch": 0.024636920384951883, "grad_norm": 126.6435775756836, "learning_rate": 6.103023516237402e-07, "loss": 9.8793, "step": 110 },
    { "epoch": 0.026876640419947508, "grad_norm": 129.69862365722656, "learning_rate": 6.662933930571109e-07, "loss": 9.3174, "step": 120 },
    { "epoch": 0.029116360454943133, "grad_norm": 139.8076629638672, "learning_rate": 7.222844344904815e-07, "loss": 8.8794, "step": 130 },
    { "epoch": 0.03135608048993876, "grad_norm": 139.58309936523438, "learning_rate": 7.782754759238523e-07, "loss": 8.5155, "step": 140 },
    { "epoch": 0.03359580052493438, "grad_norm": 161.12001037597656, "learning_rate": 8.342665173572229e-07, "loss": 8.1803, "step": 150 },
    { "epoch": 0.03583552055993001, "grad_norm": 145.7100830078125, "learning_rate": 8.902575587905936e-07, "loss": 7.8429, "step": 160 },
    { "epoch": 0.03807524059492563, "grad_norm": 141.91400146484375, "learning_rate": 9.462486002239643e-07, "loss": 7.5282, "step": 170 },
    { "epoch": 0.04031496062992126, "grad_norm": 160.6728973388672, "learning_rate": 1.0022396416573349e-06, "loss": 7.1987, "step": 180 },
    { "epoch": 0.04255468066491688, "grad_norm": 147.54306030273438, "learning_rate": 1.0582306830907057e-06, "loss": 6.8656, "step": 190 },
    { "epoch": 0.04479440069991251, "grad_norm": 170.87681579589844, "learning_rate": 1.1142217245240761e-06, "loss": 6.5252, "step": 200 },
    { "epoch": 0.04703412073490813, "grad_norm": 152.63320922851562, "learning_rate": 1.170212765957447e-06, "loss": 6.179, "step": 210 },
    { "epoch": 0.049273840769903765, "grad_norm": 149.25778198242188, "learning_rate": 1.2262038073908176e-06, "loss": 5.8261, "step": 220 },
    { "epoch": 0.05151356080489939, "grad_norm": 162.14759826660156, "learning_rate": 1.2821948488241882e-06, "loss": 5.4615, "step": 230 },
    { "epoch": 0.053753280839895015, "grad_norm": 150.42588806152344, "learning_rate": 1.338185890257559e-06, "loss": 5.0995, "step": 240 },
    { "epoch": 0.05599300087489064, "grad_norm": 180.09596252441406, "learning_rate": 1.3941769316909296e-06, "loss": 4.7214, "step": 250 },
    { "epoch": 0.058232720909886265, "grad_norm": 151.71328735351562, "learning_rate": 1.4501679731243e-06, "loss": 4.3411, "step": 260 },
    { "epoch": 0.06047244094488189, "grad_norm": 149.18179321289062, "learning_rate": 1.506159014557671e-06, "loss": 3.9648, "step": 270 },
    { "epoch": 0.06271216097987752, "grad_norm": 158.7989959716797, "learning_rate": 1.5621500559910415e-06, "loss": 3.5754, "step": 280 },
    { "epoch": 0.06495188101487315, "grad_norm": 142.4947509765625, "learning_rate": 1.6181410974244121e-06, "loss": 3.198, "step": 290 },
    { "epoch": 0.06719160104986877, "grad_norm": 172.62350463867188, "learning_rate": 1.674132138857783e-06, "loss": 2.8298, "step": 300 },
    { "epoch": 0.0694313210848644, "grad_norm": 133.85870361328125, "learning_rate": 1.7301231802911536e-06, "loss": 2.471, "step": 310 },
    { "epoch": 0.07167104111986002, "grad_norm": 118.70149230957031, "learning_rate": 1.7861142217245242e-06, "loss": 2.1336, "step": 320 },
    { "epoch": 0.07391076115485565, "grad_norm": 116.01596069335938, "learning_rate": 1.8421052631578948e-06, "loss": 1.8315, "step": 330 },
    { "epoch": 0.07615048118985127, "grad_norm": 91.01036071777344, "learning_rate": 1.8980963045912657e-06, "loss": 1.5717, "step": 340 },
    { "epoch": 0.0783902012248469, "grad_norm": 118.93321228027344, "learning_rate": 1.954087346024636e-06, "loss": 1.3266, "step": 350 },
    { "epoch": 0.08062992125984252, "grad_norm": 67.08316040039062, "learning_rate": 2.010078387458007e-06, "loss": 1.1372, "step": 360 },
    { "epoch": 0.08286964129483815, "grad_norm": 42.60381317138672, "learning_rate": 2.0660694288913777e-06, "loss": 0.9771, "step": 370 },
    { "epoch": 0.08510936132983377, "grad_norm": 46.03950500488281, "learning_rate": 2.122060470324748e-06, "loss": 0.8681, "step": 380 },
    { "epoch": 0.0873490813648294, "grad_norm": 23.767711639404297, "learning_rate": 2.178051511758119e-06, "loss": 0.7751, "step": 390 },
    { "epoch": 0.08958880139982502, "grad_norm": 45.41588592529297, "learning_rate": 2.2340425531914894e-06, "loss": 0.6874, "step": 400 },
    { "epoch": 0.09182852143482065, "grad_norm": 18.76463508605957, "learning_rate": 2.2900335946248602e-06, "loss": 0.6544, "step": 410 },
    { "epoch": 0.09406824146981627, "grad_norm": 21.69906234741211, "learning_rate": 2.346024636058231e-06, "loss": 0.5834, "step": 420 },
    { "epoch": 0.0963079615048119, "grad_norm": 29.034330368041992, "learning_rate": 2.4020156774916015e-06, "loss": 0.5398, "step": 430 },
    { "epoch": 0.09854768153980753, "grad_norm": 12.911064147949219, "learning_rate": 2.4580067189249723e-06, "loss": 0.4761, "step": 440 },
    { "epoch": 0.10078740157480315, "grad_norm": 32.778602600097656, "learning_rate": 2.5139977603583427e-06, "loss": 0.4394, "step": 450 },
    { "epoch": 0.10302712160979878, "grad_norm": 48.48755645751953, "learning_rate": 2.5699888017917135e-06, "loss": 0.4834, "step": 460 },
    { "epoch": 0.1052668416447944, "grad_norm": 24.921396255493164, "learning_rate": 2.6259798432250844e-06, "loss": 0.4231, "step": 470 },
    { "epoch": 0.10750656167979003, "grad_norm": 7.630334377288818, "learning_rate": 2.6819708846584548e-06, "loss": 0.3715, "step": 480 },
    { "epoch": 0.10974628171478565, "grad_norm": 36.03708267211914, "learning_rate": 2.7379619260918256e-06, "loss": 0.37, "step": 490 },
    { "epoch": 0.11198600174978128, "grad_norm": 33.10087585449219, "learning_rate": 2.7939529675251964e-06, "loss": 0.3522, "step": 500 },
    { "epoch": 0.1142257217847769, "grad_norm": 8.544207572937012, "learning_rate": 2.849944008958567e-06, "loss": 0.3339, "step": 510 },
    { "epoch": 0.11646544181977253, "grad_norm": 37.781951904296875, "learning_rate": 2.9059350503919377e-06, "loss": 0.325, "step": 520 },
    { "epoch": 0.11870516185476815, "grad_norm": 9.192522048950195, "learning_rate": 2.9619260918253085e-06, "loss": 0.3175, "step": 530 },
    { "epoch": 0.12094488188976378, "grad_norm": 35.02260208129883, "learning_rate": 3.017917133258679e-06, "loss": 0.3193, "step": 540 },
    { "epoch": 0.1231846019247594, "grad_norm": 4.580764293670654, "learning_rate": 3.0739081746920498e-06, "loss": 0.3122, "step": 550 },
    { "epoch": 0.12542432195975503, "grad_norm": 12.692420959472656, "learning_rate": 3.1298992161254197e-06, "loss": 0.3234, "step": 560 },
    { "epoch": 0.12766404199475065, "grad_norm": 5.639922618865967, "learning_rate": 3.1858902575587906e-06, "loss": 0.2774, "step": 570 },
    { "epoch": 0.1299037620297463, "grad_norm": 2.170525550842285, "learning_rate": 3.241881298992162e-06, "loss": 0.2828, "step": 580 },
    { "epoch": 0.1321434820647419, "grad_norm": 2.8537590503692627, "learning_rate": 3.297872340425532e-06, "loss": 0.2961, "step": 590 },
    { "epoch": 0.13438320209973753, "grad_norm": 4.110259532928467, "learning_rate": 3.3538633818589027e-06, "loss": 0.2664, "step": 600 },
    { "epoch": 0.13662292213473315, "grad_norm": 5.937915802001953, "learning_rate": 3.4098544232922735e-06, "loss": 0.3023, "step": 610 },
    { "epoch": 0.1388626421697288, "grad_norm": 14.195796012878418, "learning_rate": 3.465845464725644e-06, "loss": 0.2662, "step": 620 },
    { "epoch": 0.1411023622047244, "grad_norm": 8.165706634521484, "learning_rate": 3.5218365061590147e-06, "loss": 0.2909, "step": 630 },
    { "epoch": 0.14334208223972003, "grad_norm": 9.505974769592285, "learning_rate": 3.5778275475923856e-06, "loss": 0.277, "step": 640 },
    { "epoch": 0.14558180227471565, "grad_norm": 6.215830326080322, "learning_rate": 3.633818589025756e-06, "loss": 0.2649, "step": 650 },
    { "epoch": 0.1478215223097113, "grad_norm": 16.8066463470459, "learning_rate": 3.689809630459127e-06, "loss": 0.299, "step": 660 },
    { "epoch": 0.1500612423447069, "grad_norm": 8.53093147277832, "learning_rate": 3.7458006718924976e-06, "loss": 0.2673, "step": 670 },
    { "epoch": 0.15230096237970253, "grad_norm": 6.421195030212402, "learning_rate": 3.801791713325868e-06, "loss": 0.2792, "step": 680 },
    { "epoch": 0.15454068241469815, "grad_norm": 2.7667346000671387, "learning_rate": 3.857782754759239e-06, "loss": 0.2909, "step": 690 },
    { "epoch": 0.1567804024496938, "grad_norm": 10.2630033493042, "learning_rate": 3.91377379619261e-06, "loss": 0.2716, "step": 700 },
    { "epoch": 0.1590201224846894, "grad_norm": 1.535446286201477, "learning_rate": 3.96976483762598e-06, "loss": 0.2816, "step": 710 },
    { "epoch": 0.16125984251968503, "grad_norm": 9.585880279541016, "learning_rate": 4.025755879059351e-06, "loss": 0.2553, "step": 720 },
    { "epoch": 0.16349956255468068, "grad_norm": 14.28587818145752, "learning_rate": 4.081746920492721e-06, "loss": 0.2825, "step": 730 },
    { "epoch": 0.1657392825896763, "grad_norm": 4.546958923339844, "learning_rate": 4.137737961926092e-06, "loss": 0.2718, "step": 740 },
    { "epoch": 0.1679790026246719, "grad_norm": 2.867119312286377, "learning_rate": 4.193729003359463e-06, "loss": 0.2507, "step": 750 },
    { "epoch": 0.17021872265966753, "grad_norm": 14.170395851135254, "learning_rate": 4.2497200447928334e-06, "loss": 0.2677, "step": 760 },
    { "epoch": 0.17245844269466318, "grad_norm": 17.095518112182617, "learning_rate": 4.305711086226204e-06, "loss": 0.2506, "step": 770 },
    { "epoch": 0.1746981627296588, "grad_norm": 5.597836017608643, "learning_rate": 4.361702127659575e-06, "loss": 0.2871, "step": 780 },
    { "epoch": 0.17693788276465441, "grad_norm": 14.059051513671875, "learning_rate": 4.417693169092945e-06, "loss": 0.2731, "step": 790 },
    { "epoch": 0.17917760279965003, "grad_norm": 4.634145259857178, "learning_rate": 4.473684210526316e-06, "loss": 0.2461, "step": 800 },
    { "epoch": 0.18141732283464568, "grad_norm": 2.4694900512695312, "learning_rate": 4.529675251959687e-06, "loss": 0.2798, "step": 810 },
    { "epoch": 0.1836570428696413, "grad_norm": 2.7586958408355713, "learning_rate": 4.585666293393058e-06, "loss": 0.263, "step": 820 },
    { "epoch": 0.18589676290463691, "grad_norm": 1.317171335220337, "learning_rate": 4.6416573348264284e-06, "loss": 0.2602, "step": 830 },
    { "epoch": 0.18813648293963253, "grad_norm": 16.16120719909668, "learning_rate": 4.697648376259799e-06, "loss": 0.2567, "step": 840 },
    { "epoch": 0.19037620297462818, "grad_norm": 1.187530279159546, "learning_rate": 4.753639417693169e-06, "loss": 0.2715, "step": 850 },
    { "epoch": 0.1926159230096238, "grad_norm": 11.328052520751953, "learning_rate": 4.80963045912654e-06, "loss": 0.2785, "step": 860 },
    { "epoch": 0.19485564304461941, "grad_norm": 5.0461530685424805, "learning_rate": 4.865621500559911e-06, "loss": 0.2466, "step": 870 },
    { "epoch": 0.19709536307961506, "grad_norm": 2.635329246520996, "learning_rate": 4.921612541993282e-06, "loss": 0.2689, "step": 880 },
    { "epoch": 0.19933508311461068, "grad_norm": 18.956153869628906, "learning_rate": 4.977603583426653e-06, "loss": 0.2594, "step": 890 },
    { "epoch": 0.2015748031496063, "grad_norm": 1.4908218383789062, "learning_rate": 5.033594624860023e-06, "loss": 0.2559, "step": 900 },
    { "epoch": 0.20381452318460191, "grad_norm": 13.888111114501953, "learning_rate": 5.089585666293394e-06, "loss": 0.2859, "step": 910 },
    { "epoch": 0.20605424321959756, "grad_norm": 1.7197439670562744, "learning_rate": 5.145576707726763e-06, "loss": 0.2462, "step": 920 },
    { "epoch": 0.20829396325459318, "grad_norm": 1.1483347415924072, "learning_rate": 5.201567749160134e-06, "loss": 0.25, "step": 930 },
    { "epoch": 0.2105336832895888, "grad_norm": 10.579352378845215, "learning_rate": 5.257558790593506e-06, "loss": 0.2376, "step": 940 },
    { "epoch": 0.21277340332458441, "grad_norm": 2.291902542114258, "learning_rate": 5.313549832026877e-06, "loss": 0.2453, "step": 950 },
    { "epoch": 0.21501312335958006, "grad_norm": 5.439045429229736, "learning_rate": 5.3695408734602476e-06, "loss": 0.2984, "step": 960 },
    { "epoch": 0.21725284339457568, "grad_norm": 1.5583287477493286, "learning_rate": 5.425531914893617e-06, "loss": 0.256, "step": 970 },
    { "epoch": 0.2194925634295713, "grad_norm": 5.13300895690918, "learning_rate": 5.4815229563269875e-06, "loss": 0.259, "step": 980 },
    { "epoch": 0.22173228346456694, "grad_norm": 3.150655746459961, "learning_rate": 5.537513997760358e-06, "loss": 0.2447, "step": 990 },
    { "epoch": 0.22397200349956256, "grad_norm": 5.581788063049316, "learning_rate": 5.593505039193729e-06, "loss": 0.2388, "step": 1000 },
    { "epoch": 0.22397200349956256, "eval_loss": 0.24665296077728271, "eval_runtime": 508.3924, "eval_samples_per_second": 236.66, "eval_steps_per_second": 14.792, "eval_token_accuracy": 0.6965583269229705, "step": 1000 },
    { "epoch": 0.22621172353455818, "grad_norm": 6.410147666931152, "learning_rate": 5.6494960806271e-06, "loss": 0.2569, "step": 1010 },
    { "epoch": 0.2284514435695538, "grad_norm": 8.687148094177246, "learning_rate": 5.705487122060471e-06, "loss": 0.2499, "step": 1020 },
    { "epoch": 0.23069116360454944, "grad_norm": 6.87606954574585, "learning_rate": 5.761478163493841e-06, "loss": 0.2555, "step": 1030 },
    { "epoch": 0.23293088363954506, "grad_norm": 1.6779398918151855, "learning_rate": 5.817469204927212e-06, "loss": 0.2556, "step": 1040 },
    { "epoch": 0.23517060367454068, "grad_norm": 7.228851318359375, "learning_rate": 5.8734602463605825e-06, "loss": 0.245, "step": 1050 },
    { "epoch": 0.2374103237095363, "grad_norm": 5.364595890045166, "learning_rate": 5.929451287793953e-06, "loss": 0.2717, "step": 1060 },
    { "epoch": 0.23965004374453194, "grad_norm": 3.2734596729278564, "learning_rate": 5.985442329227324e-06, "loss": 0.2274, "step": 1070 },
    { "epoch": 0.24188976377952756, "grad_norm": 4.536466598510742, "learning_rate": 6.041433370660694e-06, "loss": 0.2561, "step": 1080 },
    { "epoch": 0.24412948381452318, "grad_norm": 7.9406280517578125, "learning_rate": 6.097424412094065e-06, "loss": 0.2428, "step": 1090 },
    { "epoch": 0.2463692038495188, "grad_norm": 6.413431644439697, "learning_rate": 6.153415453527436e-06, "loss": 0.2327, "step": 1100 },
    { "epoch": 0.24860892388451444, "grad_norm": 1.6045399904251099, "learning_rate": 6.209406494960807e-06, "loss": 0.2704, "step": 1110 },
    { "epoch": 0.25084864391951006, "grad_norm": 8.93922233581543, "learning_rate": 6.2653975363941775e-06, "loss": 0.2365, "step": 1120 },
    { "epoch": 0.2530883639545057, "grad_norm": 6.114246845245361, "learning_rate": 6.321388577827548e-06, "loss": 0.2476, "step": 1130 },
    { "epoch": 0.2553280839895013, "grad_norm": 4.84804105758667, "learning_rate": 6.377379619260918e-06, "loss": 0.2411, "step": 1140 },
    { "epoch": 0.2575678040244969, "grad_norm": 2.421581506729126, "learning_rate": 6.433370660694289e-06, "loss": 0.2251, "step": 1150 },
    { "epoch": 0.2598075240594926, "grad_norm": 13.80971622467041, "learning_rate": 6.48936170212766e-06, "loss": 0.2541, "step": 1160 },
    { "epoch": 0.2620472440944882, "grad_norm": 1.8289860486984253, "learning_rate": 6.545352743561031e-06, "loss": 0.2462, "step": 1170 },
    { "epoch": 0.2642869641294838, "grad_norm": 1.4232782125473022, "learning_rate": 6.601343784994402e-06, "loss": 0.2394, "step": 1180 },
    { "epoch": 0.26652668416447944, "grad_norm": 6.040958404541016, "learning_rate": 6.6573348264277725e-06, "loss": 0.2441, "step": 1190 },
    { "epoch": 0.26876640419947506, "grad_norm": 5.110551357269287, "learning_rate": 6.7133258678611425e-06, "loss": 0.2285, "step": 1200 },
    { "epoch": 0.2710061242344707, "grad_norm": 14.493913650512695, "learning_rate": 6.769316909294513e-06, "loss": 0.2545, "step": 1210 },
    { "epoch": 0.2732458442694663, "grad_norm": 6.708395481109619, "learning_rate": 6.825307950727884e-06, "loss": 0.2399, "step": 1220 },
    { "epoch": 0.2754855643044619, "grad_norm": 7.098331451416016, "learning_rate": 6.881298992161255e-06, "loss": 0.2529, "step": 1230 },
    { "epoch": 0.2777252843394576, "grad_norm": 7.667686462402344, "learning_rate": 6.937290033594626e-06, "loss": 0.2265, "step": 1240 },
    { "epoch": 0.2799650043744532, "grad_norm": 3.293351411819458, "learning_rate": 6.993281075027996e-06, "loss": 0.212, "step": 1250 },
    { "epoch": 0.2822047244094488, "grad_norm": 6.79879093170166, "learning_rate": 7.049272116461367e-06, "loss": 0.2496, "step": 1260 },
    { "epoch": 0.28444444444444444, "grad_norm": 4.2775092124938965, "learning_rate": 7.1052631578947375e-06, "loss": 0.2219, "step": 1270 },
    { "epoch": 0.28668416447944006, "grad_norm": 2.1673076152801514, "learning_rate": 7.161254199328108e-06, "loss": 0.2374, "step": 1280 },
    { "epoch": 0.2889238845144357, "grad_norm": 5.312763214111328, "learning_rate": 7.217245240761479e-06, "loss": 0.2256, "step": 1290 },
    { "epoch": 0.2911636045494313, "grad_norm": 4.370241165161133, "learning_rate": 7.27323628219485e-06, "loss": 0.2357, "step": 1300 },
    { "epoch": 0.29340332458442697, "grad_norm": 3.1714890003204346, "learning_rate": 7.32922732362822e-06, "loss": 0.2557, "step": 1310 },
    { "epoch": 0.2956430446194226, "grad_norm": 2.53678297996521, "learning_rate": 7.385218365061591e-06, "loss": 0.2331, "step": 1320 },
    { "epoch": 0.2978827646544182, "grad_norm": 3.696383476257324, "learning_rate": 7.441209406494962e-06, "loss": 0.232, "step": 1330 },
    { "epoch": 0.3001224846894138, "grad_norm": 4.8781232833862305, "learning_rate": 7.4972004479283325e-06, "loss": 0.2306, "step": 1340 },
    { "epoch": 0.30236220472440944, "grad_norm": 1.83755362033844, "learning_rate": 7.553191489361703e-06, "loss": 0.2337, "step": 1350 },
    { "epoch": 0.30460192475940506, "grad_norm": 11.356858253479004, "learning_rate": 7.609182530795074e-06, "loss": 0.2459, "step": 1360 },
    { "epoch": 0.3068416447944007, "grad_norm": 8.49129581451416, "learning_rate": 7.665173572228444e-06, "loss": 0.2293, "step": 1370 },
    { "epoch": 0.3090813648293963, "grad_norm": 2.2894465923309326, "learning_rate": 7.721164613661814e-06, "loss": 0.2383, "step": 1380 },
    { "epoch": 0.311321084864392, "grad_norm": 1.8182672262191772, "learning_rate": 7.777155655095186e-06, "loss": 0.241, "step": 1390 },
    { "epoch": 0.3135608048993876, "grad_norm": 3.236206293106079, "learning_rate": 7.833146696528556e-06, "loss": 0.2285, "step": 1400 },
    { "epoch": 0.3158005249343832, "grad_norm": 1.631281852722168, "learning_rate": 7.889137737961927e-06, "loss": 0.2571, "step": 1410 },
    { "epoch": 0.3180402449693788, "grad_norm": 8.107253074645996, "learning_rate": 7.945128779395297e-06, "loss": 0.2209, "step": 1420 },
    { "epoch": 0.32027996500437445, "grad_norm": 1.1233727931976318, "learning_rate": 8.001119820828667e-06, "loss": 0.2427, "step": 1430 },
    { "epoch": 0.32251968503937006, "grad_norm": 8.097697257995605, "learning_rate": 8.057110862262039e-06, "loss": 0.2452, "step": 1440 },
    { "epoch": 0.3247594050743657, "grad_norm": 2.3983142375946045, "learning_rate": 8.113101903695409e-06, "loss": 0.2186, "step": 1450 },
    { "epoch": 0.32699912510936135, "grad_norm": 12.040475845336914, "learning_rate": 8.16909294512878e-06, "loss": 0.2438, "step": 1460 },
    { "epoch": 0.329238845144357, "grad_norm": 1.604379653930664, "learning_rate": 8.22508398656215e-06, "loss": 0.2395, "step": 1470 },
    { "epoch": 0.3314785651793526, "grad_norm": 3.6802425384521484, "learning_rate": 8.28107502799552e-06, "loss": 0.2417, "step": 1480 },
    { "epoch": 0.3337182852143482, "grad_norm": 1.3274339437484741, "learning_rate": 8.337066069428892e-06, "loss": 0.2333, "step": 1490 },
    { "epoch": 0.3359580052493438, "grad_norm": 2.7458624839782715, "learning_rate": 8.393057110862262e-06, "loss": 0.2099, "step": 1500 },
    { "epoch": 0.33819772528433945, "grad_norm": 2.586228847503662, "learning_rate": 8.449048152295634e-06, "loss": 0.2487, "step": 1510 },
    { "epoch": 0.34043744531933506, "grad_norm": 4.21591329574585, "learning_rate": 8.505039193729004e-06, "loss": 0.2232, "step": 1520 },
    { "epoch": 0.3426771653543307, "grad_norm": 10.471442222595215, "learning_rate": 8.561030235162374e-06, "loss": 0.243, "step": 1530 },
    { "epoch": 0.34491688538932636, "grad_norm": 2.1316909790039062, "learning_rate": 8.617021276595746e-06, "loss": 0.2245, "step": 1540 },
    { "epoch": 0.347156605424322, "grad_norm": 2.768832206726074, "learning_rate": 8.673012318029116e-06, "loss": 0.231, "step": 1550 },
    { "epoch": 0.3493963254593176, "grad_norm": 1.221541404724121, "learning_rate": 8.729003359462487e-06, "loss": 0.227, "step": 1560 },
    { "epoch": 0.3516360454943132, "grad_norm": 7.449015140533447, "learning_rate": 8.784994400895857e-06, "loss": 0.2445, "step": 1570 },
    { "epoch": 0.35387576552930883, "grad_norm": 3.9796910285949707, "learning_rate": 8.840985442329229e-06, "loss": 0.2312, "step": 1580 },
    { "epoch": 0.35611548556430445, "grad_norm": 4.768671989440918, "learning_rate": 8.896976483762599e-06, "loss": 0.2371, "step": 1590 },
    { "epoch": 0.35835520559930006, "grad_norm": 2.461826801300049, "learning_rate": 8.952967525195969e-06, "loss": 0.2342, "step": 1600 },
    { "epoch": 0.36059492563429574, "grad_norm": 15.604554176330566, "learning_rate": 9.00895856662934e-06, "loss": 0.2639, "step": 1610 },
    { "epoch": 0.36283464566929136, "grad_norm": 10.057650566101074, "learning_rate": 9.06494960806271e-06, "loss": 0.2364, "step": 1620 },
    { "epoch": 0.365074365704287, "grad_norm": 1.363266110420227, "learning_rate": 9.120940649496082e-06, "loss": 0.2284, "step": 1630 },
    { "epoch": 0.3673140857392826, "grad_norm": 6.2388763427734375, "learning_rate": 9.176931690929452e-06, "loss": 0.2363, "step": 1640 },
    { "epoch": 0.3695538057742782, "grad_norm": 5.490090370178223, "learning_rate": 9.232922732362822e-06, "loss": 0.2274, "step": 1650 },
    { "epoch": 0.37179352580927383, "grad_norm": 4.038967132568359, "learning_rate": 9.288913773796194e-06, "loss": 0.2431, "step": 1660 },
    { "epoch": 0.37403324584426945, "grad_norm": 2.4451630115509033, "learning_rate": 9.344904815229564e-06, "loss": 0.2269, "step": 1670 },
    { "epoch": 0.37627296587926506, "grad_norm": 7.310824871063232, "learning_rate": 9.400895856662936e-06, "loss": 0.2338, "step": 1680 },
    { "epoch": 0.37851268591426074, "grad_norm": 3.421902656555176, "learning_rate": 9.456886898096306e-06, "loss": 0.225, "step": 1690 },
    { "epoch": 0.38075240594925636, "grad_norm": 2.0587453842163086, "learning_rate": 9.512877939529676e-06, "loss": 0.2229, "step": 1700 },
    { "epoch": 0.382992125984252, "grad_norm": 7.844244956970215, "learning_rate": 9.568868980963046e-06, "loss": 0.2471, "step": 1710 },
    { "epoch": 0.3852318460192476, "grad_norm": 2.0231168270111084, "learning_rate": 9.624860022396417e-06, "loss": 0.2265, "step": 1720 },
    { "epoch": 0.3874715660542432, "grad_norm": 4.695873737335205, "learning_rate": 9.680851063829787e-06, "loss": 0.2319, "step": 1730 },
    { "epoch": 0.38971128608923883, "grad_norm": 4.3562703132629395, "learning_rate": 9.736842105263159e-06, "loss": 0.2402, "step": 1740 },
    { "epoch": 0.39195100612423445, "grad_norm": 3.8503875732421875, "learning_rate": 9.79283314669653e-06, "loss": 0.2172, "step": 1750 },
    { "epoch": 0.3941907261592301, "grad_norm": 1.6659972667694092, "learning_rate": 9.848824188129899e-06, "loss": 0.2556, "step": 1760 },
    { "epoch": 0.39643044619422574, "grad_norm": 8.133085250854492, "learning_rate": 9.90481522956327e-06, "loss": 0.2404, "step": 1770 },
    { "epoch": 0.39867016622922136, "grad_norm": 1.0106691122055054, "learning_rate": 9.96080627099664e-06, "loss": 0.2221, "step": 1780 },
    { "epoch": 0.400909886264217, "grad_norm": 4.416316986083984, "learning_rate": 9.999999140094955e-06, "loss": 0.2338, "step": 1790 },
    { "epoch": 0.4031496062992126, "grad_norm": 2.253812074661255, "learning_rate": 9.999983852902361e-06, "loss": 0.224, "step": 1800 },
    { "epoch": 0.4053893263342082, "grad_norm": 3.477606773376465, "learning_rate": 9.999949456775993e-06, "loss": 0.2328, "step": 1810 },
    { "epoch": 0.40762904636920383, "grad_norm": 4.672702789306641, "learning_rate": 9.9998959518473e-06, "loss": 0.2336, "step": 1820 },
    { "epoch": 0.4098687664041995, "grad_norm": 1.8212214708328247, "learning_rate": 9.999823338320772e-06, "loss": 0.2283, "step": 1830 },
    { "epoch": 0.4121084864391951, "grad_norm": 2.800435781478882, "learning_rate": 9.99973161647392e-06, "loss": 0.2379, "step": 1840 },
    { "epoch": 0.41434820647419074, "grad_norm": 7.563694477081299, "learning_rate": 9.999620786657289e-06, "loss": 0.2188, "step": 1850 },
    { "epoch": 0.41658792650918636, "grad_norm": 13.088976860046387, "learning_rate": 9.999490849294448e-06, "loss": 0.2567, "step": 1860 },
    { "epoch": 0.418827646544182, "grad_norm": 1.8773629665374756, "learning_rate": 9.99934180488199e-06, "loss": 0.2316, "step": 1870 },
    { "epoch": 0.4210673665791776, "grad_norm": 3.4965360164642334, "learning_rate": 9.999173653989533e-06, "loss": 0.2355, "step": 1880 },
    { "epoch": 0.4233070866141732, "grad_norm": 3.4962074756622314, "learning_rate": 9.998986397259716e-06, "loss": 0.229, "step": 1890 },
    { "epoch": 0.42554680664916883, "grad_norm": 1.0358535051345825, "learning_rate": 9.998780035408198e-06, "loss": 0.2235, "step": 1900 },
    { "epoch": 0.4277865266841645, "grad_norm": 15.394458770751953, "learning_rate": 9.998554569223652e-06, "loss": 0.2484, "step": 1910 },
    { "epoch": 0.4300262467191601, "grad_norm": 2.1868062019348145, "learning_rate": 9.998309999567764e-06, "loss": 0.2382, "step": 1920 },
    { "epoch": 0.43226596675415574, "grad_norm": 3.1081955432891846, "learning_rate": 9.99804632737523e-06, "loss": 0.229, "step": 1930 },
    { "epoch": 0.43450568678915136, "grad_norm": 1.4906216859817505, "learning_rate": 9.997763553653752e-06, "loss": 0.2319, "step": 1940 },
    { "epoch": 0.436745406824147, "grad_norm": 3.580657482147217, "learning_rate": 9.997461679484034e-06, "loss": 0.2156, "step": 1950 },
    { "epoch": 0.4389851268591426, "grad_norm": 9.583480834960938, "learning_rate": 9.997140706019779e-06, "loss": 0.237, "step": 1960 },
    { "epoch": 0.4412248468941382, "grad_norm": 5.272135257720947, "learning_rate": 9.996800634487685e-06, "loss": 0.227, "step": 1970 },
    { "epoch": 0.4434645669291339, "grad_norm": 2.0222976207733154, "learning_rate": 9.996441466187434e-06, "loss": 0.2317, "step": 1980 },
    { "epoch": 0.4457042869641295, "grad_norm": 1.1668893098831177, "learning_rate": 9.996063202491698e-06, "loss": 0.232, "step": 1990 },
    { "epoch": 0.4479440069991251, "grad_norm": 10.037023544311523, "learning_rate": 9.995665844846119e-06, "loss": 0.2207, "step": 2000 },
    { "epoch": 0.4479440069991251, "eval_loss": 0.23211318254470825, "eval_runtime": 506.8766, "eval_samples_per_second": 237.367, "eval_steps_per_second": 14.836, "eval_token_accuracy": 0.6974161164822881, "step": 2000 },
    { "epoch": 0.45018372703412074, "grad_norm": 2.2029612064361572, "learning_rate": 9.995249394769327e-06, "loss": 0.2383, "step": 2010 },
    { "epoch": 0.45242344706911636, "grad_norm": 9.830987930297852, "learning_rate": 9.994813853852903e-06, "loss": 0.2207, "step": 2020 },
    { "epoch": 0.454663167104112, "grad_norm": 1.8869295120239258, "learning_rate": 9.9943592237614e-06, "loss": 0.2264, "step": 2030 },
    { "epoch": 0.4569028871391076, "grad_norm": 1.761500597000122, "learning_rate": 9.993885506232324e-06, "loss": 0.2285, "step": 2040 },
    { "epoch": 0.4591426071741032, "grad_norm": 4.521030426025391, "learning_rate": 9.993392703076126e-06, "loss": 0.2236, "step": 2050 },
    { "epoch": 0.4613823272090989, "grad_norm": 1.2697912454605103, "learning_rate": 9.9928808161762e-06, "loss": 0.2341, "step": 2060 },
    { "epoch": 0.4636220472440945, "grad_norm": 3.4168875217437744, "learning_rate": 9.992349847488878e-06, "loss": 0.2126, "step": 2070 },
    { "epoch": 0.4658617672790901, "grad_norm": 5.974389553070068, "learning_rate": 9.991799799043413e-06, "loss": 0.2372, "step": 2080 },
    { "epoch": 0.46810148731408574, "grad_norm": 1.9752360582351685, "learning_rate": 9.991230672941982e-06, "loss": 0.2288, "step": 2090 },
    { "epoch": 0.47034120734908136, "grad_norm": 1.7691487073898315, "learning_rate": 9.990642471359668e-06, "loss": 0.2142, "step": 2100 },
    { "epoch": 0.472580927384077, "grad_norm": 4.6645731925964355, "learning_rate": 9.990035196544461e-06, "loss": 0.2403, "step": 2110 },
    { "epoch": 0.4748206474190726, "grad_norm": 2.952207565307617, "learning_rate": 9.989408850817243e-06, "loss": 0.2269, "step": 2120 },
    { "epoch": 0.47706036745406827, "grad_norm": 1.3406009674072266, "learning_rate": 9.988763436571783e-06, "loss": 0.2302, "step": 2130 },
    { "epoch": 0.4793000874890639, "grad_norm": 1.8972488641738892, "learning_rate": 9.98809895627472e-06, "loss": 0.2271, "step": 2140 },
    { "epoch": 0.4815398075240595, "grad_norm": 2.660292863845825, "learning_rate": 9.987415412465568e-06, "loss": 0.2278, "step": 2150 },
    { "epoch": 0.4837795275590551, "grad_norm": 2.4427502155303955, "learning_rate": 9.986712807756695e-06, "loss": 0.2633, "step": 2160 },
    { "epoch": 0.48601924759405074, "grad_norm": 5.147864818572998, "learning_rate": 9.98599114483331e-06, "loss": 0.23, "step": 2170 },
    { "epoch": 0.48825896762904636, "grad_norm": 5.563263893127441, "learning_rate": 9.98525042645347e-06, "loss": 0.2378, "step": 2180 },
    { "epoch": 0.490498687664042, "grad_norm": 6.136834621429443, "learning_rate": 9.984490655448049e-06, "loss": 0.2303, "step": 2190 },
    { "epoch": 0.4927384076990376, "grad_norm": 2.446849822998047, "learning_rate": 9.983711834720738e-06, "loss": 0.2234, "step": 2200 },
    { "epoch": 0.49497812773403327, "grad_norm": 4.507411956787109, "learning_rate": 9.982913967248035e-06, "loss": 0.2462, "step": 2210 },
    { "epoch": 0.4972178477690289, "grad_norm": 1.4177577495574951, "learning_rate": 9.982097056079228e-06, "loss": 0.2254, "step": 2220 },
    { "epoch": 0.4994575678040245, "grad_norm": 7.209784030914307, "learning_rate": 9.981261104336389e-06, "loss": 0.2264, "step": 2230 },
    { "epoch": 0.5016972878390201, "grad_norm": 18.120464324951172, "learning_rate": 9.980406115214353e-06, "loss": 0.2385, "step": 2240 },
    { "epoch": 0.5039370078740157, "grad_norm": 1.6847277879714966, "learning_rate": 9.979532091980723e-06, "loss": 0.2201, "step": 2250 },
    { "epoch": 0.5061767279090114, "grad_norm": 2.8450379371643066, "learning_rate": 9.97863903797584e-06, "loss": 0.2419, "step": 2260 },
    { "epoch": 0.508416447944007, "grad_norm": 4.046864032745361, "learning_rate": 9.97772695661277e-06, "loss": 0.2259, "step": 2270 },
    { "epoch": 0.5106561679790026, "grad_norm": 1.2429368495941162, "learning_rate": 9.976795851377312e-06, "loss": 0.2272, "step": 2280 },
    { "epoch": 0.5128958880139982, "grad_norm": 3.9099016189575195, "learning_rate": 9.975845725827959e-06, "loss": 0.232, "step": 2290 },
    { "epoch": 0.5151356080489938, "grad_norm": 1.1243308782577515, "learning_rate": 9.9748765835959e-06, "loss": 0.2198, "step": 2300 },
    { "epoch": 0.5173753280839894, "grad_norm": 5.873220443725586, "learning_rate": 9.973888428385006e-06, "loss": 0.2435, "step": 2310 },
    { "epoch": 0.5196150481189852, "grad_norm": 1.5939319133758545, "learning_rate": 9.972881263971803e-06, "loss": 0.2246, "step": 2320 },
    { "epoch": 0.5218547681539808, "grad_norm": 4.427680969238281, "learning_rate": 9.971855094205473e-06, "loss": 0.2324, "step": 2330 },
    { "epoch": 0.5240944881889764, "grad_norm": 2.819718360900879, "learning_rate": 9.97080992300783e-06, "loss": 0.2497, "step": 2340 },
    { "epoch": 0.526334208223972, "grad_norm": 2.0332233905792236, "learning_rate": 9.969745754373311e-06, "loss": 0.2225, "step": 2350 },
    { "epoch": 0.5285739282589677, "grad_norm": 12.013405799865723, "learning_rate": 9.968662592368952e-06, "loss": 0.2445, "step": 2360 },
    { "epoch": 0.5308136482939633, "grad_norm": 10.702028274536133, "learning_rate": 9.967560441134381e-06, "loss": 0.2164, "step": 2370 },
    { "epoch": 0.5330533683289589, "grad_norm": 5.7824907302856445, "learning_rate": 9.966439304881798e-06, "loss": 0.2213, "step": 2380 },
    { "epoch": 0.5352930883639545, "grad_norm": 5.64496374130249, "learning_rate": 9.965299187895962e-06, "loss": 0.2288, "step": 2390 },
    { "epoch": 0.5375328083989501, "grad_norm": 1.3699864149093628, "learning_rate": 9.964140094534169e-06, "loss": 0.2322, "step": 2400 },
    { "epoch": 0.5397725284339457, "grad_norm": 4.486180305480957, "learning_rate": 9.962962029226244e-06, "loss": 0.2296, "step": 2410 },
    { "epoch": 0.5420122484689414, "grad_norm": 2.6139583587646484, "learning_rate": 9.961764996474514e-06, "loss": 0.2161, "step": 2420 },
    { "epoch": 0.544251968503937, "grad_norm": 1.0381577014923096, "learning_rate": 9.960549000853799e-06, "loss": 0.2275, "step": 2430 },
    { "epoch": 0.5464916885389326, "grad_norm": 2.3962111473083496, "learning_rate": 9.959314047011389e-06, "loss": 0.23, "step": 2440 },
    { "epoch": 0.5487314085739282, "grad_norm": 1.2226966619491577, "learning_rate": 9.958060139667027e-06, "loss": 0.2163, "step": 2450 },
    { "epoch": 0.5509711286089238, "grad_norm": 6.7834062576293945, "learning_rate": 9.9567872836129e-06, "loss": 0.2324, "step": 2460 },
    { "epoch": 0.5532108486439196, "grad_norm": 7.556985378265381, "learning_rate": 9.955495483713604e-06, "loss": 0.2186, "step": 2470 },
    { "epoch": 0.5554505686789152, "grad_norm": 6.278934955596924, "learning_rate": 9.954184744906139e-06, "loss": 0.232, "step": 2480 },
    { "epoch": 0.5576902887139108, "grad_norm": 1.6271597146987915, "learning_rate": 9.95285507219989e-06, "loss": 0.2324, "step": 2490 },
    { "epoch": 0.5599300087489064, "grad_norm": 3.6434125900268555, "learning_rate": 9.951506470676592e-06, "loss": 0.2182, "step": 2500 },
    { "epoch": 0.562169728783902, "grad_norm": 1.8215718269348145, "learning_rate": 9.950138945490335e-06, "loss": 0.2353, "step": 2510 },
    { "epoch": 0.5644094488188977, "grad_norm": 3.433577299118042, "learning_rate": 9.948752501867522e-06, "loss": 0.2257, "step": 2520 },
    { "epoch": 0.5666491688538933, "grad_norm": 4.02660608291626, "learning_rate": 9.947347145106865e-06, "loss": 0.2192, "step": 2530 },
    { "epoch": 0.5688888888888889, "grad_norm": 1.360437273979187, "learning_rate": 9.945922880579351e-06, "loss": 0.2199, "step": 2540 },
    { "epoch": 0.5711286089238845, "grad_norm": 1.155464768409729, "learning_rate": 9.944479713728237e-06, "loss": 0.1921, "step": 2550 },
    { "epoch": 0.5733683289588801, "grad_norm": 7.086646556854248, "learning_rate": 9.943017650069013e-06, "loss": 0.242, "step": 2560 },
    { "epoch": 0.5756080489938757, "grad_norm": 4.299362659454346, "learning_rate": 9.941536695189396e-06, "loss": 0.228, "step": 2570 },
    { "epoch": 0.5778477690288714, "grad_norm": 8.915771484375, "learning_rate": 9.940036854749297e-06, "loss": 0.2308, "step": 2580 },
    { "epoch": 0.580087489063867, "grad_norm": 2.6741573810577393, "learning_rate": 9.938518134480803e-06, "loss": 0.2398, "step": 2590 },
    { "epoch": 0.5823272090988626, "grad_norm": 2.5663797855377197, "learning_rate": 9.93698054018816e-06, "loss": 0.2077, "step": 2600 },
    { "epoch": 0.5845669291338582, "grad_norm": 2.6795785427093506, "learning_rate": 9.935424077747744e-06, "loss": 0.2367, "step": 2610 },
    { "epoch": 0.5868066491688539, "grad_norm": 3.2071633338928223, "learning_rate": 9.933848753108041e-06, "loss": 0.2106, "step": 2620 },
    { "epoch": 0.5890463692038496, "grad_norm": 3.1179144382476807, "learning_rate": 9.932254572289626e-06, "loss": 0.2446, "step": 2630 },
    { "epoch": 0.5912860892388452, "grad_norm": 1.6278126239776611, "learning_rate": 9.930641541385138e-06, "loss": 0.2211, "step": 2640 },
    { "epoch": 0.5935258092738408, "grad_norm": 5.3661885261535645, "learning_rate": 9.929009666559255e-06, "loss": 0.2107, "step": 2650 },
    { "epoch": 0.5957655293088364, "grad_norm": 6.771117210388184, "learning_rate": 9.927358954048676e-06, "loss": 0.2399, "step": 2660 },
    { "epoch": 0.598005249343832, "grad_norm": 2.178788185119629, "learning_rate": 9.925689410162095e-06, "loss": 0.21, "step": 2670 },
    { "epoch": 0.6002449693788277, "grad_norm": 8.165093421936035, "learning_rate": 9.92400104128017e-06, "loss": 0.2246, "step": 2680 },
    { "epoch": 0.6024846894138233, "grad_norm": 3.471679449081421, "learning_rate": 9.922293853855509e-06, "loss": 0.2131, "step": 2690 },
    { "epoch": 0.6047244094488189, "grad_norm": 2.9488112926483154, "learning_rate": 9.92056785441264e-06, "loss": 0.2245, "step": 2700 },
    { "epoch": 0.6069641294838145, "grad_norm": 5.4626145362854, "learning_rate": 9.918823049547984e-06, "loss": 0.2444, "step": 2710 },
    { "epoch": 0.6092038495188101, "grad_norm": 1.8659770488739014, "learning_rate": 9.917059445929838e-06, "loss": 0.2226, "step": 2720 },
    { "epoch": 0.6114435695538057, "grad_norm": 2.041576385498047, "learning_rate": 9.915277050298336e-06, "loss": 0.2401, "step": 2730 },
    { "epoch": 0.6136832895888014, "grad_norm": 7.602120399475098, "learning_rate": 9.913475869465442e-06, "loss": 0.2105, "step": 2740 },
    { "epoch": 0.615923009623797, "grad_norm": 1.5145801305770874, "learning_rate": 9.911655910314901e-06, "loss": 0.2168, "step": 2750 },
    { "epoch": 0.6181627296587926, "grad_norm": 2.4858598709106445, "learning_rate": 9.909817179802234e-06, "loss": 0.2363, "step": 2760 },
    { "epoch": 0.6204024496937883, "grad_norm": 2.1684751510620117, "learning_rate": 9.907959684954702e-06, "loss": 0.2234, "step": 2770 },
    { "epoch": 0.622642169728784, "grad_norm": 1.7832976579666138, "learning_rate": 9.906083432871273e-06, "loss": 0.2282, "step": 2780 },
    { "epoch": 0.6248818897637796, "grad_norm": 6.974465370178223, "learning_rate": 9.90418843072261e-06, "loss": 0.2264, "step": 2790 },
    { "epoch": 0.6271216097987752, "grad_norm": 4.383815765380859, "learning_rate": 9.902274685751027e-06, "loss": 0.2145, "step": 2800 },
    { "epoch": 0.6293613298337708, "grad_norm": 1.6938287019729614, "learning_rate": 9.900342205270475e-06, "loss": 0.2388, "step": 2810 },
    { "epoch": 0.6316010498687664, "grad_norm": 14.980836868286133, "learning_rate": 9.898390996666502e-06, "loss": 0.2289, "step": 2820 },
    { "epoch": 0.633840769903762, "grad_norm": 6.246406555175781, "learning_rate": 9.89642106739624e-06, "loss": 0.2319, "step": 2830 },
    { "epoch": 0.6360804899387577, "grad_norm": 7.479663848876953, "learning_rate": 9.894432424988363e-06, "loss": 0.2224, "step": 2840 },
    { "epoch": 0.6383202099737533, "grad_norm": 6.564876556396484, "learning_rate": 9.892425077043058e-06, "loss": 0.2185, "step": 2850 },
    { "epoch": 0.6405599300087489, "grad_norm": 5.287945747375488, "learning_rate": 9.89039903123201e-06, "loss": 0.2352, "step": 2860 },
    { "epoch": 0.6427996500437445, "grad_norm": 1.6189258098602295, "learning_rate": 9.888354295298356e-06, "loss": 0.2107, "step": 2870 },
    { "epoch": 0.6450393700787401, "grad_norm": 1.7470353841781616, "learning_rate": 9.88629087705667e-06, "loss": 0.2284, "step": 2880 },
    { "epoch": 0.6472790901137357, "grad_norm": 6.374598503112793, "learning_rate": 9.884208784392917e-06, "loss": 0.2295, "step": 2890 },
    { "epoch": 0.6495188101487314, "grad_norm": 3.07970929145813, "learning_rate": 9.882108025264442e-06, "loss": 0.2199, "step": 2900 },
    { "epoch": 0.651758530183727, "grad_norm": 5.308441638946533, "learning_rate": 9.87998860769992e-06, "loss": 0.2246, "step": 2910 },
    { "epoch": 0.6539982502187227, "grad_norm": 8.279508590698242, "learning_rate": 9.877850539799341e-06, "loss": 0.2222, "step": 2920 },
    { "epoch": 0.6562379702537183, "grad_norm": 6.9987006187438965, "learning_rate": 9.87569382973397e-06, "loss": 0.2319, "step": 2930 },
    { "epoch": 0.658477690288714, "grad_norm": 4.786025524139404, "learning_rate": 9.873518485746321e-06, "loss": 0.2257, "step": 2940 },
    { "epoch": 0.6607174103237096, "grad_norm": 3.0393166542053223, "learning_rate": 9.871324516150123e-06, "loss": 0.2153, "step": 2950 },
    { "epoch": 0.6629571303587052, "grad_norm": 3.177739143371582, "learning_rate": 9.869111929330282e-06, "loss": 0.2232, "step": 2960 },
    { "epoch": 0.6651968503937008, "grad_norm": 2.291867971420288, "learning_rate": 9.866880733742865e-06, "loss": 0.212, "step": 2970 },
    { "epoch": 0.6674365704286964, "grad_norm": 3.9312686920166016, "learning_rate": 9.864630937915052e-06, "loss": 0.2262, "step": 2980 },
    { "epoch": 0.669676290463692, "grad_norm": 2.110924482345581, "learning_rate": 9.862362550445108e-06, "loss": 0.2274, "step": 2990 },
    { "epoch": 0.6719160104986877, "grad_norm": 1.4342049360275269, "learning_rate": 9.860075580002359e-06, "loss": 0.2171, "step": 3000 },
    { "epoch": 0.6719160104986877, "eval_loss": 0.21779567003250122, "eval_runtime": 508.7876, "eval_samples_per_second": 236.476, "eval_steps_per_second": 14.78, "eval_token_accuracy": 0.7062596501325423, "step": 3000 },
    { "epoch": 0.6741557305336833, "grad_norm": 1.5321989059448242, "learning_rate": 9.857770035327142e-06, "loss": 0.2255, "step": 3010 },
    { "epoch": 0.6763954505686789, "grad_norm": 9.719202041625977, "learning_rate": 9.85544592523079e-06, "loss": 0.2185, "step": 3020 },
    { "epoch": 0.6786351706036745, "grad_norm": 8.409933090209961, "learning_rate": 9.853103258595581e-06, "loss": 0.2295, "step": 3030 },
    { "epoch": 0.6808748906386701, "grad_norm": 1.9369258880615234, "learning_rate": 9.85074204437472e-06, "loss": 0.2283, "step": 3040 },
    { "epoch": 0.6831146106736657, "grad_norm": 2.4668948650360107, "learning_rate": 9.848362291592288e-06, "loss": 0.2123, "step": 3050 },
    { "epoch": 0.6853543307086614, "grad_norm": 2.983069658279419, "learning_rate": 9.845964009343228e-06, "loss": 0.2315, "step": 3060 },
    { "epoch": 0.6875940507436571, "grad_norm": 3.6539225578308105, "learning_rate": 9.843547206793289e-06, "loss": 0.2202, "step": 3070 },
    { "epoch": 0.6898337707786527, "grad_norm": 9.64335823059082, "learning_rate": 9.841111893179006e-06, "loss": 0.2149, "step": 3080 },
    { "epoch": 0.6920734908136483, "grad_norm": 11.048383712768555, "learning_rate": 9.838658077807657e-06, "loss": 0.2334, "step": 3090 },
    { "epoch": 0.694313210848644, "grad_norm": 7.029510021209717, "learning_rate": 9.836185770057234e-06, "loss": 0.2093, "step": 3100 },
    { "epoch": 0.6965529308836396, "grad_norm": 2.615210771560669, "learning_rate": 9.833694979376398e-06, "loss": 0.2294, "step": 3110 },
    { "epoch": 0.6987926509186352, "grad_norm": 9.237405776977539, "learning_rate": 9.831185715284452e-06, "loss": 0.2225, "step": 3120 },
    { "epoch": 0.7010323709536308, "grad_norm": 2.880690097808838, "learning_rate": 9.828657987371301e-06, "loss": 0.2291, "step": 3130 },
    { "epoch": 0.7032720909886264, "grad_norm": 4.929702281951904, "learning_rate": 9.826111805297409e-06, "loss": 0.2338, "step": 3140 },
    { "epoch": 0.705511811023622, "grad_norm": 1.8025418519973755, "learning_rate": 9.823547178793775e-06, "loss": 0.2087, "step": 3150 },
    { "epoch": 0.7077515310586177, "grad_norm": 10.105000495910645, "learning_rate": 9.820964117661888e-06, "loss": 0.2376, "step": 3160 },
    { "epoch": 0.7099912510936133, "grad_norm": 10.464491844177246, "learning_rate": 9.818362631773685e-06, "loss": 0.2162, "step": 3170 },
    { "epoch": 0.7122309711286089, "grad_norm": 1.2578132152557373, "learning_rate": 9.815742731071524e-06, "loss": 0.2297, "step": 3180 },
    { "epoch": 0.7144706911636045, "grad_norm": 7.663752555847168, "learning_rate": 9.813104425568138e-06, "loss": 0.2151, "step": 3190 },
    { "epoch": 0.7167104111986001, "grad_norm": 3.7501728534698486, "learning_rate": 9.810447725346604e-06, "loss": 0.2226, "step": 3200 },
    { "epoch": 0.7189501312335957, "grad_norm": 7.99606466293335, "learning_rate": 9.807772640560292e-06, "loss": 0.2327, "step": 3210 },
    { "epoch": 0.7211898512685915, "grad_norm": 4.222293376922607, "learning_rate": 9.805079181432842e-06, "loss": 0.2206, "step": 3220 },
    { "epoch": 0.7234295713035871, "grad_norm": 6.806760311126709, "learning_rate": 9.802367358258114e-06, "loss": 0.2147, "step": 3230 },
    { "epoch": 0.7256692913385827, "grad_norm": 4.147646427154541, "learning_rate": 9.799637181400147e-06, "loss": 0.2097, "step": 3240 },
    { "epoch": 0.7279090113735783, "grad_norm": 1.1662570238113403, "learning_rate": 9.796888661293133e-06, "loss": 0.2096, "step": 3250 },
    { "epoch": 0.730148731408574, "grad_norm": 6.412913799285889, "learning_rate": 9.794121808441361e-06, "loss": 0.2361, "step": 3260 },
    { "epoch": 0.7323884514435696, "grad_norm": 1.2370624542236328, "learning_rate": 9.791336633419189e-06, "loss": 0.2041, "step": 3270 },
    { "epoch": 0.7346281714785652, "grad_norm": 0.9865597486495972, "learning_rate": 9.788533146870995e-06, "loss": 0.2199, "step": 3280 },
    { "epoch": 0.7368678915135608, "grad_norm": 4.455222129821777, "learning_rate": 9.78571135951114e-06, "loss": 0.22, "step": 3290 },
    { "epoch": 0.7391076115485564, "grad_norm": 4.656614780426025, "learning_rate": 9.782871282123932e-06, "loss": 0.2182, "step": 3300 },
    { "epoch": 0.741347331583552, "grad_norm": 3.1240029335021973, "learning_rate": 9.780012925563573e-06, "loss": 0.2307, "step": 3310 },
    { "epoch": 0.7435870516185477, "grad_norm": 3.156013011932373, "learning_rate": 9.777136300754129e-06, "loss": 0.2141, "step": 3320 },
    { "epoch": 0.7458267716535433,
      "grad_norm": 4.594078540802002,
      "learning_rate": 9.774241418689481e-06,
| "loss": 0.2338, | |
| "step": 3330 | |
| }, | |
| { | |
| "epoch": 0.7480664916885389, | |
| "grad_norm": 6.030776023864746, | |
| "learning_rate": 9.771328290433287e-06, | |
| "loss": 0.222, | |
| "step": 3340 | |
| }, | |
| { | |
| "epoch": 0.7503062117235345, | |
| "grad_norm": 3.0741419792175293, | |
| "learning_rate": 9.768396927118939e-06, | |
| "loss": 0.2051, | |
| "step": 3350 | |
| }, | |
| { | |
| "epoch": 0.7525459317585301, | |
| "grad_norm": 1.915945053100586, | |
| "learning_rate": 9.76544733994952e-06, | |
| "loss": 0.2241, | |
| "step": 3360 | |
| }, | |
| { | |
| "epoch": 0.7547856517935259, | |
| "grad_norm": 6.906998634338379, | |
| "learning_rate": 9.762479540197753e-06, | |
| "loss": 0.2111, | |
| "step": 3370 | |
| }, | |
| { | |
| "epoch": 0.7570253718285215, | |
| "grad_norm": 1.6207234859466553, | |
| "learning_rate": 9.75949353920598e-06, | |
| "loss": 0.2209, | |
| "step": 3380 | |
| }, | |
| { | |
| "epoch": 0.7592650918635171, | |
| "grad_norm": 1.919848084449768, | |
| "learning_rate": 9.756489348386093e-06, | |
| "loss": 0.2238, | |
| "step": 3390 | |
| }, | |
| { | |
| "epoch": 0.7615048118985127, | |
| "grad_norm": 1.796086072921753, | |
| "learning_rate": 9.753466979219507e-06, | |
| "loss": 0.2103, | |
| "step": 3400 | |
| }, | |
| { | |
| "epoch": 0.7637445319335083, | |
| "grad_norm": 1.6984485387802124, | |
| "learning_rate": 9.75042644325711e-06, | |
| "loss": 0.2229, | |
| "step": 3410 | |
| }, | |
| { | |
| "epoch": 0.765984251968504, | |
| "grad_norm": 4.693706035614014, | |
| "learning_rate": 9.747367752119216e-06, | |
| "loss": 0.222, | |
| "step": 3420 | |
| }, | |
| { | |
| "epoch": 0.7682239720034996, | |
| "grad_norm": 3.6621434688568115, | |
| "learning_rate": 9.744290917495535e-06, | |
| "loss": 0.2253, | |
| "step": 3430 | |
| }, | |
| { | |
| "epoch": 0.7704636920384952, | |
| "grad_norm": 4.475337505340576, | |
| "learning_rate": 9.741195951145105e-06, | |
| "loss": 0.2198, | |
| "step": 3440 | |
| }, | |
| { | |
| "epoch": 0.7727034120734908, | |
| "grad_norm": 6.554020404815674, | |
| "learning_rate": 9.738082864896267e-06, | |
| "loss": 0.2082, | |
| "step": 3450 | |
| }, | |
| { | |
| "epoch": 0.7749431321084864, | |
| "grad_norm": 1.775171160697937, | |
| "learning_rate": 9.734951670646612e-06, | |
| "loss": 0.2345, | |
| "step": 3460 | |
| }, | |
| { | |
| "epoch": 0.777182852143482, | |
| "grad_norm": 10.973596572875977, | |
| "learning_rate": 9.731802380362936e-06, | |
| "loss": 0.2201, | |
| "step": 3470 | |
| }, | |
| { | |
| "epoch": 0.7794225721784777, | |
| "grad_norm": 3.4094762802124023, | |
| "learning_rate": 9.728635006081191e-06, | |
| "loss": 0.2299, | |
| "step": 3480 | |
| }, | |
| { | |
| "epoch": 0.7816622922134733, | |
| "grad_norm": 3.6424269676208496, | |
| "learning_rate": 9.725449559906448e-06, | |
| "loss": 0.2235, | |
| "step": 3490 | |
| }, | |
| { | |
| "epoch": 0.7839020122484689, | |
| "grad_norm": 4.4308319091796875, | |
| "learning_rate": 9.72224605401284e-06, | |
| "loss": 0.2117, | |
| "step": 3500 | |
| }, | |
| { | |
| "epoch": 0.7861417322834645, | |
| "grad_norm": 5.900296688079834, | |
| "learning_rate": 9.719024500643526e-06, | |
| "loss": 0.2298, | |
| "step": 3510 | |
| }, | |
| { | |
| "epoch": 0.7883814523184602, | |
| "grad_norm": 4.9092020988464355, | |
| "learning_rate": 9.715784912110632e-06, | |
| "loss": 0.2181, | |
| "step": 3520 | |
| }, | |
| { | |
| "epoch": 0.7906211723534559, | |
| "grad_norm": 3.5412979125976562, | |
| "learning_rate": 9.712527300795218e-06, | |
| "loss": 0.2213, | |
| "step": 3530 | |
| }, | |
| { | |
| "epoch": 0.7928608923884515, | |
| "grad_norm": 2.001018524169922, | |
| "learning_rate": 9.709251679147217e-06, | |
| "loss": 0.2176, | |
| "step": 3540 | |
| }, | |
| { | |
| "epoch": 0.7951006124234471, | |
| "grad_norm": 1.459351897239685, | |
| "learning_rate": 9.705958059685399e-06, | |
| "loss": 0.212, | |
| "step": 3550 | |
| }, | |
| { | |
| "epoch": 0.7973403324584427, | |
| "grad_norm": 2.8420157432556152, | |
| "learning_rate": 9.702646454997315e-06, | |
| "loss": 0.2299, | |
| "step": 3560 | |
| }, | |
| { | |
| "epoch": 0.7995800524934383, | |
| "grad_norm": 3.9574944972991943, | |
| "learning_rate": 9.699316877739253e-06, | |
| "loss": 0.204, | |
| "step": 3570 | |
| }, | |
| { | |
| "epoch": 0.801819772528434, | |
| "grad_norm": 1.4476044178009033, | |
| "learning_rate": 9.695969340636189e-06, | |
| "loss": 0.2102, | |
| "step": 3580 | |
| }, | |
| { | |
| "epoch": 0.8040594925634296, | |
| "grad_norm": 2.721195936203003, | |
| "learning_rate": 9.692603856481736e-06, | |
| "loss": 0.2274, | |
| "step": 3590 | |
| }, | |
| { | |
| "epoch": 0.8062992125984252, | |
| "grad_norm": 3.3109710216522217, | |
| "learning_rate": 9.689220438138099e-06, | |
| "loss": 0.2086, | |
| "step": 3600 | |
| }, | |
| { | |
| "epoch": 0.8085389326334208, | |
| "grad_norm": 6.931108474731445, | |
| "learning_rate": 9.685819098536024e-06, | |
| "loss": 0.223, | |
| "step": 3610 | |
| }, | |
| { | |
| "epoch": 0.8107786526684164, | |
| "grad_norm": 5.233217239379883, | |
| "learning_rate": 9.682399850674745e-06, | |
| "loss": 0.2303, | |
| "step": 3620 | |
| }, | |
| { | |
| "epoch": 0.813018372703412, | |
| "grad_norm": 1.7679535150527954, | |
| "learning_rate": 9.678962707621944e-06, | |
| "loss": 0.2211, | |
| "step": 3630 | |
| }, | |
| { | |
| "epoch": 0.8152580927384077, | |
| "grad_norm": 2.661332368850708, | |
| "learning_rate": 9.675507682513687e-06, | |
| "loss": 0.2066, | |
| "step": 3640 | |
| }, | |
| { | |
| "epoch": 0.8174978127734033, | |
| "grad_norm": 0.9035616517066956, | |
| "learning_rate": 9.67203478855439e-06, | |
| "loss": 0.2112, | |
| "step": 3650 | |
| }, | |
| { | |
| "epoch": 0.819737532808399, | |
| "grad_norm": 4.500796318054199, | |
| "learning_rate": 9.668544039016754e-06, | |
| "loss": 0.2215, | |
| "step": 3660 | |
| }, | |
| { | |
| "epoch": 0.8219772528433946, | |
| "grad_norm": 1.4607346057891846, | |
| "learning_rate": 9.665035447241721e-06, | |
| "loss": 0.2043, | |
| "step": 3670 | |
| }, | |
| { | |
| "epoch": 0.8242169728783902, | |
| "grad_norm": 1.0367200374603271, | |
| "learning_rate": 9.661509026638427e-06, | |
| "loss": 0.2284, | |
| "step": 3680 | |
| }, | |
| { | |
| "epoch": 0.8264566929133859, | |
| "grad_norm": 7.388817310333252, | |
| "learning_rate": 9.657964790684143e-06, | |
| "loss": 0.2177, | |
| "step": 3690 | |
| }, | |
| { | |
| "epoch": 0.8286964129483815, | |
| "grad_norm": 6.648113250732422, | |
| "learning_rate": 9.654402752924223e-06, | |
| "loss": 0.2108, | |
| "step": 3700 | |
| }, | |
| { | |
| "epoch": 0.8309361329833771, | |
| "grad_norm": 3.2974541187286377, | |
| "learning_rate": 9.650822926972064e-06, | |
| "loss": 0.2163, | |
| "step": 3710 | |
| }, | |
| { | |
| "epoch": 0.8331758530183727, | |
| "grad_norm": 1.5554214715957642, | |
| "learning_rate": 9.647225326509037e-06, | |
| "loss": 0.2127, | |
| "step": 3720 | |
| }, | |
| { | |
| "epoch": 0.8354155730533683, | |
| "grad_norm": 1.1680930852890015, | |
| "learning_rate": 9.643609965284452e-06, | |
| "loss": 0.21, | |
| "step": 3730 | |
| }, | |
| { | |
| "epoch": 0.837655293088364, | |
| "grad_norm": 3.262380599975586, | |
| "learning_rate": 9.639976857115492e-06, | |
| "loss": 0.2234, | |
| "step": 3740 | |
| }, | |
| { | |
| "epoch": 0.8398950131233596, | |
| "grad_norm": 1.6106623411178589, | |
| "learning_rate": 9.636326015887167e-06, | |
| "loss": 0.2102, | |
| "step": 3750 | |
| }, | |
| { | |
| "epoch": 0.8421347331583552, | |
| "grad_norm": 3.9737660884857178, | |
| "learning_rate": 9.632657455552258e-06, | |
| "loss": 0.214, | |
| "step": 3760 | |
| }, | |
| { | |
| "epoch": 0.8443744531933508, | |
| "grad_norm": 5.535833358764648, | |
| "learning_rate": 9.628971190131266e-06, | |
| "loss": 0.2171, | |
| "step": 3770 | |
| }, | |
| { | |
| "epoch": 0.8466141732283464, | |
| "grad_norm": 1.5001612901687622, | |
| "learning_rate": 9.625267233712357e-06, | |
| "loss": 0.2084, | |
| "step": 3780 | |
| }, | |
| { | |
| "epoch": 0.848853893263342, | |
| "grad_norm": 3.5858755111694336, | |
| "learning_rate": 9.621545600451308e-06, | |
| "loss": 0.2122, | |
| "step": 3790 | |
| }, | |
| { | |
| "epoch": 0.8510936132983377, | |
| "grad_norm": 2.433242082595825, | |
| "learning_rate": 9.617806304571455e-06, | |
| "loss": 0.203, | |
| "step": 3800 | |
| }, | |
| { | |
| "epoch": 0.8533333333333334, | |
| "grad_norm": 1.7443480491638184, | |
| "learning_rate": 9.614049360363632e-06, | |
| "loss": 0.2245, | |
| "step": 3810 | |
| }, | |
| { | |
| "epoch": 0.855573053368329, | |
| "grad_norm": 10.19050407409668, | |
| "learning_rate": 9.610274782186128e-06, | |
| "loss": 0.2178, | |
| "step": 3820 | |
| }, | |
| { | |
| "epoch": 0.8578127734033246, | |
| "grad_norm": 4.910433769226074, | |
| "learning_rate": 9.606482584464622e-06, | |
| "loss": 0.219, | |
| "step": 3830 | |
| }, | |
| { | |
| "epoch": 0.8600524934383202, | |
| "grad_norm": 4.34425687789917, | |
| "learning_rate": 9.602672781692133e-06, | |
| "loss": 0.2109, | |
| "step": 3840 | |
| }, | |
| { | |
| "epoch": 0.8622922134733159, | |
| "grad_norm": 1.3024218082427979, | |
| "learning_rate": 9.598845388428957e-06, | |
| "loss": 0.1945, | |
| "step": 3850 | |
| }, | |
| { | |
| "epoch": 0.8645319335083115, | |
| "grad_norm": 3.875014066696167, | |
| "learning_rate": 9.59500041930263e-06, | |
| "loss": 0.2177, | |
| "step": 3860 | |
| }, | |
| { | |
| "epoch": 0.8667716535433071, | |
| "grad_norm": 6.025304317474365, | |
| "learning_rate": 9.591137889007843e-06, | |
| "loss": 0.2087, | |
| "step": 3870 | |
| }, | |
| { | |
| "epoch": 0.8690113735783027, | |
| "grad_norm": 1.9392833709716797, | |
| "learning_rate": 9.587257812306417e-06, | |
| "loss": 0.2315, | |
| "step": 3880 | |
| }, | |
| { | |
| "epoch": 0.8712510936132983, | |
| "grad_norm": 1.3517054319381714, | |
| "learning_rate": 9.583360204027224e-06, | |
| "loss": 0.2164, | |
| "step": 3890 | |
| }, | |
| { | |
| "epoch": 0.873490813648294, | |
| "grad_norm": 2.300626039505005, | |
| "learning_rate": 9.579445079066136e-06, | |
| "loss": 0.2034, | |
| "step": 3900 | |
| }, | |
| { | |
| "epoch": 0.8757305336832896, | |
| "grad_norm": 6.352007865905762, | |
| "learning_rate": 9.57551245238598e-06, | |
| "loss": 0.2157, | |
| "step": 3910 | |
| }, | |
| { | |
| "epoch": 0.8779702537182852, | |
| "grad_norm": 8.510796546936035, | |
| "learning_rate": 9.571562339016463e-06, | |
| "loss": 0.2137, | |
| "step": 3920 | |
| }, | |
| { | |
| "epoch": 0.8802099737532808, | |
| "grad_norm": 1.9818949699401855, | |
| "learning_rate": 9.567594754054122e-06, | |
| "loss": 0.2188, | |
| "step": 3930 | |
| }, | |
| { | |
| "epoch": 0.8824496937882764, | |
| "grad_norm": 5.979051113128662, | |
| "learning_rate": 9.563609712662274e-06, | |
| "loss": 0.2103, | |
| "step": 3940 | |
| }, | |
| { | |
| "epoch": 0.884689413823272, | |
| "grad_norm": 4.557352066040039, | |
| "learning_rate": 9.559607230070943e-06, | |
| "loss": 0.2065, | |
| "step": 3950 | |
| }, | |
| { | |
| "epoch": 0.8869291338582678, | |
| "grad_norm": 1.7298095226287842, | |
| "learning_rate": 9.555587321576816e-06, | |
| "loss": 0.2199, | |
| "step": 3960 | |
| }, | |
| { | |
| "epoch": 0.8891688538932634, | |
| "grad_norm": 7.868046760559082, | |
| "learning_rate": 9.551550002543172e-06, | |
| "loss": 0.2195, | |
| "step": 3970 | |
| }, | |
| { | |
| "epoch": 0.891408573928259, | |
| "grad_norm": 4.001582145690918, | |
| "learning_rate": 9.547495288399837e-06, | |
| "loss": 0.2244, | |
| "step": 3980 | |
| }, | |
| { | |
| "epoch": 0.8936482939632546, | |
| "grad_norm": 5.262408256530762, | |
| "learning_rate": 9.543423194643113e-06, | |
| "loss": 0.21, | |
| "step": 3990 | |
| }, | |
| { | |
| "epoch": 0.8958880139982502, | |
| "grad_norm": 1.7287728786468506, | |
| "learning_rate": 9.539333736835723e-06, | |
| "loss": 0.2081, | |
| "step": 4000 | |
| }, | |
| { | |
| "epoch": 0.8958880139982502, | |
| "eval_loss": 0.21423038840293884, | |
| "eval_runtime": 508.214, | |
| "eval_samples_per_second": 236.743, | |
| "eval_steps_per_second": 14.797, | |
| "eval_token_accuracy": 0.7443897795757539, | |
| "step": 4000 | |
| } | |
| ], | |
| "logging_steps": 10, | |
| "max_steps": 17856, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 4, | |
| "save_steps": 1000, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": false | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 2.8984694308677353e+19, | |
| "train_batch_size": 2, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
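
The listing above is the tail of a Hugging Face `trainer_state.json`: per-step training records (`loss`, `grad_norm`, `learning_rate`, emitted every `logging_steps` = 10 steps) interleaved with periodic evaluation records (`eval_loss`, `eval_token_accuracy`, and runtime stats), followed by run-level metadata. As a minimal sketch of how to consume this file (the filename and path are placeholders; Hugging Face `Trainer` writes a `trainer_state.json` into each checkpoint directory), the standard-library snippet below splits the two record types and prints the evaluation trajectory:

```python
import json

# Placeholder path: Trainer saves this file as <checkpoint-dir>/trainer_state.json.
with open("trainer_state.json") as f:
    state = json.load(f)

# Evaluation records carry "eval_loss"; periodic training records carry "grad_norm".
evals = [rec for rec in state["log_history"] if "eval_loss" in rec]
trains = [rec for rec in state["log_history"] if "grad_norm" in rec]

last = trains[-1]
print(f"progress: step {last['step']} / {state['max_steps']} "
      f"(epoch {last['epoch']:.3f} of {state['num_train_epochs']})")

for rec in evals:
    print(f"step {rec['step']:>5}: eval_loss={rec['eval_loss']:.4f}  "
          f"token_acc={rec['eval_token_accuracy']:.4f}")
```

Between the two evaluation records visible in this excerpt, `eval_loss` falls from 0.2178 (step 3000) to 0.2142 (step 4000) while `eval_token_accuracy` rises from 0.7063 to 0.7444, so the step-4000 checkpoint is the stronger of the two; with `max_steps` = 17856 and `num_train_epochs` = 4, the run shown here is just under a quarter complete (epoch ≈ 0.896).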