{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.999474513925381,
  "eval_steps": 500,
  "global_step": 951,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0010509721492380452,
      "grad_norm": 67.45640910506796,
      "learning_rate": 3.125e-08,
      "loss": 2.5156,
      "step": 1
    },
    {
      "epoch": 0.005254860746190226,
      "grad_norm": 75.55100712691016,
      "learning_rate": 1.5625e-07,
      "loss": 2.5354,
      "step": 5
    },
    {
      "epoch": 0.010509721492380452,
      "grad_norm": 66.60203909118349,
      "learning_rate": 3.125e-07,
      "loss": 2.5407,
      "step": 10
    },
    {
      "epoch": 0.015764582238570676,
      "grad_norm": 36.16907230722729,
      "learning_rate": 4.6875e-07,
      "loss": 2.1943,
      "step": 15
    },
    {
      "epoch": 0.021019442984760904,
      "grad_norm": 16.934219590193187,
      "learning_rate": 6.25e-07,
      "loss": 1.7895,
      "step": 20
    },
    {
      "epoch": 0.02627430373095113,
      "grad_norm": 6.67698023522737,
      "learning_rate": 7.8125e-07,
      "loss": 1.4917,
      "step": 25
    },
    {
      "epoch": 0.03152916447714135,
      "grad_norm": 3.725999126673678,
      "learning_rate": 9.375e-07,
      "loss": 1.3425,
      "step": 30
    },
    {
      "epoch": 0.03678402522333158,
      "grad_norm": 2.150496113400007,
      "learning_rate": 1.09375e-06,
      "loss": 1.2571,
      "step": 35
    },
    {
      "epoch": 0.04203888596952181,
      "grad_norm": 1.409974548672782,
      "learning_rate": 1.25e-06,
      "loss": 1.208,
      "step": 40
    },
    {
      "epoch": 0.047293746715712036,
      "grad_norm": 1.1758035521958887,
      "learning_rate": 1.40625e-06,
      "loss": 1.1701,
      "step": 45
    },
    {
      "epoch": 0.05254860746190226,
      "grad_norm": 1.0288132651204651,
      "learning_rate": 1.5625e-06,
      "loss": 1.1422,
      "step": 50
    },
    {
      "epoch": 0.057803468208092484,
      "grad_norm": 1.0269817770392966,
      "learning_rate": 1.7187499999999998e-06,
      "loss": 1.1347,
      "step": 55
    },
    {
      "epoch": 0.0630583289542827,
      "grad_norm": 1.1632025269193185,
      "learning_rate": 1.875e-06,
      "loss": 1.1252,
      "step": 60
    },
    {
      "epoch": 0.06831318970047294,
      "grad_norm": 0.973286786188442,
      "learning_rate": 2.0312500000000002e-06,
      "loss": 1.1206,
      "step": 65
    },
    {
      "epoch": 0.07356805044666316,
      "grad_norm": 1.1374146499942284,
      "learning_rate": 2.1875e-06,
      "loss": 1.123,
      "step": 70
    },
    {
      "epoch": 0.0788229111928534,
      "grad_norm": 0.9559677259256227,
      "learning_rate": 2.3437500000000002e-06,
      "loss": 1.105,
      "step": 75
    },
    {
      "epoch": 0.08407777193904362,
      "grad_norm": 1.0626129431033882,
      "learning_rate": 2.5e-06,
      "loss": 1.0969,
      "step": 80
    },
    {
      "epoch": 0.08933263268523384,
      "grad_norm": 0.9767375057324753,
      "learning_rate": 2.6562499999999998e-06,
      "loss": 1.1099,
      "step": 85
    },
    {
      "epoch": 0.09458749343142407,
      "grad_norm": 0.9240797677725251,
      "learning_rate": 2.8125e-06,
      "loss": 1.1119,
      "step": 90
    },
    {
      "epoch": 0.09984235417761429,
      "grad_norm": 0.9401234680151226,
      "learning_rate": 2.96875e-06,
      "loss": 1.0926,
      "step": 95
    },
    {
      "epoch": 0.10509721492380451,
      "grad_norm": 1.0117170886130586,
      "learning_rate": 2.9998379903275155e-06,
      "loss": 1.1108,
      "step": 100
    },
    {
      "epoch": 0.11035207566999475,
      "grad_norm": 0.906919049524167,
      "learning_rate": 2.9991798860113893e-06,
      "loss": 1.1162,
      "step": 105
    },
    {
      "epoch": 0.11560693641618497,
      "grad_norm": 0.9141362173846742,
      "learning_rate": 2.998015783397426e-06,
      "loss": 1.1091,
      "step": 110
    },
    {
      "epoch": 0.1208617971623752,
      "grad_norm": 0.9619396255470063,
      "learning_rate": 2.9963460753897363e-06,
      "loss": 1.0962,
      "step": 115
    },
    {
      "epoch": 0.1261166579085654,
      "grad_norm": 1.0465936968149612,
      "learning_rate": 2.994171325542714e-06,
      "loss": 1.0911,
      "step": 120
    },
    {
      "epoch": 0.13137151865475566,
      "grad_norm": 0.9530685775511196,
      "learning_rate": 2.991492267870822e-06,
      "loss": 1.0917,
      "step": 125
    },
    {
      "epoch": 0.13662637940094588,
      "grad_norm": 0.9008971542587714,
      "learning_rate": 2.9883098066008556e-06,
      "loss": 1.0879,
      "step": 130
    },
    {
      "epoch": 0.1418812401471361,
      "grad_norm": 1.010852514785442,
      "learning_rate": 2.984625015866745e-06,
      "loss": 1.086,
      "step": 135
    },
    {
      "epoch": 0.14713610089332632,
      "grad_norm": 0.8992929518552587,
      "learning_rate": 2.9804391393470235e-06,
      "loss": 1.1009,
      "step": 140
    },
    {
      "epoch": 0.15239096163951654,
      "grad_norm": 0.9062761866531701,
      "learning_rate": 2.975753589845059e-06,
      "loss": 1.1019,
      "step": 145
    },
    {
      "epoch": 0.1576458223857068,
      "grad_norm": 0.8610097516319934,
      "learning_rate": 2.970569948812214e-06,
      "loss": 1.0691,
      "step": 150
    },
    {
      "epoch": 0.162900683131897,
      "grad_norm": 0.8872085969729788,
      "learning_rate": 2.9648899658140767e-06,
      "loss": 1.0862,
      "step": 155
    },
    {
      "epoch": 0.16815554387808723,
      "grad_norm": 1.0208916358421982,
      "learning_rate": 2.9587155579399543e-06,
      "loss": 1.0823,
      "step": 160
    },
    {
      "epoch": 0.17341040462427745,
      "grad_norm": 1.000083341373154,
      "learning_rate": 2.9520488091558225e-06,
      "loss": 1.0771,
      "step": 165
    },
    {
      "epoch": 0.17866526537046767,
      "grad_norm": 0.9057099612333195,
      "learning_rate": 2.944891969600953e-06,
      "loss": 1.0875,
      "step": 170
    },
    {
      "epoch": 0.1839201261166579,
      "grad_norm": 0.9155112430806412,
      "learning_rate": 2.9372474548284537e-06,
      "loss": 1.095,
      "step": 175
    },
    {
      "epoch": 0.18917498686284814,
      "grad_norm": 0.8983015781319096,
      "learning_rate": 2.9291178449899786e-06,
      "loss": 1.0934,
      "step": 180
    },
    {
      "epoch": 0.19442984760903836,
      "grad_norm": 0.9013247242472799,
      "learning_rate": 2.920505883964884e-06,
      "loss": 1.0917,
      "step": 185
    },
    {
      "epoch": 0.19968470835522859,
      "grad_norm": 0.892018911723705,
      "learning_rate": 2.9114144784341226e-06,
      "loss": 1.0854,
      "step": 190
    },
    {
      "epoch": 0.2049395691014188,
      "grad_norm": 0.9231553972162896,
      "learning_rate": 2.9018466968991914e-06,
      "loss": 1.0749,
      "step": 195
    },
    {
      "epoch": 0.21019442984760903,
      "grad_norm": 0.8456793004463601,
      "learning_rate": 2.8918057686464587e-06,
      "loss": 1.0823,
      "step": 200
    },
    {
      "epoch": 0.21544929059379928,
      "grad_norm": 0.8761446392793291,
      "learning_rate": 2.881295082657229e-06,
      "loss": 1.0769,
      "step": 205
    },
    {
      "epoch": 0.2207041513399895,
      "grad_norm": 0.8860527749104554,
      "learning_rate": 2.8703181864639013e-06,
      "loss": 1.0925,
      "step": 210
    },
    {
      "epoch": 0.22595901208617972,
      "grad_norm": 0.8708280345626856,
      "learning_rate": 2.8588787849526228e-06,
      "loss": 1.0731,
      "step": 215
    },
    {
      "epoch": 0.23121387283236994,
      "grad_norm": 0.8688347790794807,
      "learning_rate": 2.846980739112822e-06,
      "loss": 1.0749,
      "step": 220
    },
    {
      "epoch": 0.23646873357856016,
      "grad_norm": 0.8729839981153089,
      "learning_rate": 2.834628064734065e-06,
      "loss": 1.0697,
      "step": 225
    },
    {
      "epoch": 0.2417235943247504,
      "grad_norm": 0.8993877107760438,
      "learning_rate": 2.821824931050655e-06,
      "loss": 1.0751,
      "step": 230
    },
    {
      "epoch": 0.24697845507094063,
      "grad_norm": 0.8452099155218541,
      "learning_rate": 2.8085756593344505e-06,
      "loss": 1.07,
      "step": 235
    },
    {
      "epoch": 0.2522333158171308,
      "grad_norm": 0.8798731633285007,
      "learning_rate": 2.794884721436361e-06,
      "loss": 1.0655,
      "step": 240
    },
    {
      "epoch": 0.25748817656332107,
      "grad_norm": 0.895358087891481,
      "learning_rate": 2.780756738277021e-06,
      "loss": 1.0983,
      "step": 245
    },
    {
      "epoch": 0.2627430373095113,
      "grad_norm": 0.8609931821431536,
      "learning_rate": 2.766196478287156e-06,
      "loss": 1.0907,
      "step": 250
    },
    {
      "epoch": 0.2679978980557015,
      "grad_norm": 0.8553591777726404,
      "learning_rate": 2.751208855798155e-06,
      "loss": 1.0801,
      "step": 255
    },
    {
      "epoch": 0.27325275880189176,
      "grad_norm": 0.8654715123002859,
      "learning_rate": 2.7357989293834005e-06,
      "loss": 1.082,
      "step": 260
    },
    {
      "epoch": 0.27850761954808195,
      "grad_norm": 0.8697051687346281,
      "learning_rate": 2.7199719001509175e-06,
      "loss": 1.0748,
      "step": 265
    },
    {
      "epoch": 0.2837624802942722,
      "grad_norm": 0.880312866484559,
      "learning_rate": 2.7037331099879117e-06,
      "loss": 1.0878,
      "step": 270
    },
    {
      "epoch": 0.28901734104046245,
      "grad_norm": 0.8452698137198734,
      "learning_rate": 2.687088039757792e-06,
      "loss": 1.0797,
      "step": 275
    },
    {
      "epoch": 0.29427220178665264,
      "grad_norm": 0.9791182527762319,
      "learning_rate": 2.6700423074502888e-06,
      "loss": 1.0717,
      "step": 280
    },
    {
      "epoch": 0.2995270625328429,
      "grad_norm": 0.8645394739775393,
      "learning_rate": 2.652601666285289e-06,
      "loss": 1.114,
      "step": 285
    },
    {
      "epoch": 0.3047819232790331,
      "grad_norm": 0.9004251617653912,
      "learning_rate": 2.6347720027710253e-06,
      "loss": 1.067,
      "step": 290
    },
    {
      "epoch": 0.31003678402522333,
      "grad_norm": 0.872604926778432,
      "learning_rate": 2.6165593347172837e-06,
      "loss": 1.0732,
      "step": 295
    },
    {
      "epoch": 0.3152916447714136,
      "grad_norm": 0.8929460807825601,
      "learning_rate": 2.5979698092042925e-06,
      "loss": 1.0876,
      "step": 300
    },
    {
      "epoch": 0.3205465055176038,
      "grad_norm": 0.8588249540629153,
      "learning_rate": 2.5790097005079765e-06,
      "loss": 1.0897,
      "step": 305
    },
    {
      "epoch": 0.325801366263794,
      "grad_norm": 0.8727624932776766,
      "learning_rate": 2.559685407982288e-06,
      "loss": 1.0666,
      "step": 310
    },
    {
      "epoch": 0.3310562270099842,
      "grad_norm": 0.933192018274695,
      "learning_rate": 2.5400034538993135e-06,
      "loss": 1.0868,
      "step": 315
    },
    {
      "epoch": 0.33631108775617446,
      "grad_norm": 0.9302912982362859,
      "learning_rate": 2.519970481247901e-06,
      "loss": 1.0518,
      "step": 320
    },
    {
      "epoch": 0.3415659485023647,
      "grad_norm": 0.8713971326878898,
      "learning_rate": 2.4995932514915404e-06,
      "loss": 1.0683,
      "step": 325
    },
    {
      "epoch": 0.3468208092485549,
      "grad_norm": 0.8822615502295114,
      "learning_rate": 2.478878642286253e-06,
      "loss": 1.0644,
      "step": 330
    },
    {
      "epoch": 0.35207566999474516,
      "grad_norm": 0.8368326296787831,
      "learning_rate": 2.4578336451592705e-06,
      "loss": 1.0811,
      "step": 335
    },
    {
      "epoch": 0.35733053074093535,
      "grad_norm": 0.8544515279738967,
      "learning_rate": 2.4364653631492774e-06,
      "loss": 1.0623,
      "step": 340
    },
    {
      "epoch": 0.3625853914871256,
      "grad_norm": 0.891511057734901,
      "learning_rate": 2.414781008409014e-06,
      "loss": 1.0737,
      "step": 345
    },
    {
      "epoch": 0.3678402522333158,
      "grad_norm": 0.9052547289541356,
      "learning_rate": 2.3927878997710575e-06,
      "loss": 1.0981,
      "step": 350
    },
    {
      "epoch": 0.37309511297950604,
      "grad_norm": 0.8951487470408807,
      "learning_rate": 2.3704934602775926e-06,
      "loss": 1.0827,
      "step": 355
    },
    {
      "epoch": 0.3783499737256963,
      "grad_norm": 0.9097434855008588,
      "learning_rate": 2.347905214675008e-06,
      "loss": 1.0713,
      "step": 360
    },
    {
      "epoch": 0.3836048344718865,
      "grad_norm": 0.8583189613291908,
      "learning_rate": 2.3250307868741717e-06,
      "loss": 1.0706,
      "step": 365
    },
    {
      "epoch": 0.38885969521807673,
      "grad_norm": 0.8216502318444575,
      "learning_rate": 2.3018778973772334e-06,
      "loss": 1.0573,
      "step": 370
    },
    {
      "epoch": 0.3941145559642669,
      "grad_norm": 1.0166338065086127,
      "learning_rate": 2.278454360671823e-06,
      "loss": 1.0865,
      "step": 375
    },
    {
      "epoch": 0.39936941671045717,
      "grad_norm": 0.8685946998953423,
      "learning_rate": 2.2547680825935325e-06,
      "loss": 1.0852,
      "step": 380
    },
    {
      "epoch": 0.4046242774566474,
      "grad_norm": 0.8206385124004496,
      "learning_rate": 2.2308270576575657e-06,
      "loss": 1.0683,
      "step": 385
    },
    {
      "epoch": 0.4098791382028376,
      "grad_norm": 0.8627531067721118,
      "learning_rate": 2.206639366360451e-06,
      "loss": 1.0885,
      "step": 390
    },
    {
      "epoch": 0.41513399894902786,
      "grad_norm": 0.8850142996528242,
      "learning_rate": 2.1822131724527425e-06,
      "loss": 1.0588,
      "step": 395
    },
    {
      "epoch": 0.42038885969521805,
      "grad_norm": 0.8294172803163861,
      "learning_rate": 2.157556720183616e-06,
      "loss": 1.0542,
      "step": 400
    },
    {
      "epoch": 0.4256437204414083,
      "grad_norm": 0.8307870053309333,
      "learning_rate": 2.1326783315182984e-06,
      "loss": 1.0666,
      "step": 405
    },
    {
      "epoch": 0.43089858118759855,
      "grad_norm": 0.8178946165962943,
      "learning_rate": 2.1075864033292623e-06,
      "loss": 1.0725,
      "step": 410
    },
    {
      "epoch": 0.43615344193378874,
      "grad_norm": 0.8236704022077929,
      "learning_rate": 2.082289404562144e-06,
      "loss": 1.0769,
      "step": 415
    },
    {
      "epoch": 0.441408302679979,
      "grad_norm": 0.8321193181865734,
      "learning_rate": 2.0567958733773313e-06,
      "loss": 1.0613,
      "step": 420
    },
    {
      "epoch": 0.4466631634261692,
      "grad_norm": 0.8122682546804041,
      "learning_rate": 2.0311144142681904e-06,
      "loss": 1.0642,
      "step": 425
    },
    {
      "epoch": 0.45191802417235943,
      "grad_norm": 0.8236878035013919,
      "learning_rate": 2.005253695156909e-06,
      "loss": 1.0472,
      "step": 430
    },
    {
      "epoch": 0.4571728849185497,
      "grad_norm": 0.8493608509140299,
      "learning_rate": 1.9792224444689222e-06,
      "loss": 1.0616,
      "step": 435
    },
    {
      "epoch": 0.4624277456647399,
      "grad_norm": 0.842399913675415,
      "learning_rate": 1.9530294481869286e-06,
      "loss": 1.0803,
      "step": 440
    },
    {
      "epoch": 0.4676826064109301,
      "grad_norm": 0.8657084634018396,
      "learning_rate": 1.926683546885469e-06,
      "loss": 1.0588,
      "step": 445
    },
    {
      "epoch": 0.4729374671571203,
      "grad_norm": 0.8242451456404312,
      "learning_rate": 1.9001936327470894e-06,
      "loss": 1.0709,
      "step": 450
    },
    {
      "epoch": 0.47819232790331057,
      "grad_norm": 0.8243645563523258,
      "learning_rate": 1.873568646561075e-06,
      "loss": 1.0674,
      "step": 455
    },
    {
      "epoch": 0.4834471886495008,
      "grad_norm": 0.8343407072556835,
      "learning_rate": 1.8468175747057898e-06,
      "loss": 1.0748,
      "step": 460
    },
    {
      "epoch": 0.488702049395691,
      "grad_norm": 0.8704271811597876,
      "learning_rate": 1.8199494461156204e-06,
      "loss": 1.0533,
      "step": 465
    },
    {
      "epoch": 0.49395691014188126,
      "grad_norm": 0.869601199009106,
      "learning_rate": 1.7929733292335591e-06,
      "loss": 1.0733,
      "step": 470
    },
    {
      "epoch": 0.49921177088807145,
      "grad_norm": 0.8251688330803062,
      "learning_rate": 1.765898328950455e-06,
      "loss": 1.0651,
      "step": 475
    },
    {
      "epoch": 0.5044666316342616,
      "grad_norm": 0.8182570293593837,
      "learning_rate": 1.738733583531959e-06,
      "loss": 1.0802,
      "step": 480
    },
    {
      "epoch": 0.509721492380452,
      "grad_norm": 0.861721166977276,
      "learning_rate": 1.7114882615342073e-06,
      "loss": 1.056,
      "step": 485
    },
    {
      "epoch": 0.5149763531266421,
      "grad_norm": 0.8025950431569188,
      "learning_rate": 1.6841715587092798e-06,
      "loss": 1.0785,
      "step": 490
    },
    {
      "epoch": 0.5202312138728323,
      "grad_norm": 0.8462319262578287,
      "learning_rate": 1.6567926949014804e-06,
      "loss": 1.0745,
      "step": 495
    },
    {
      "epoch": 0.5254860746190226,
      "grad_norm": 0.8532567164974908,
      "learning_rate": 1.6293609109354836e-06,
      "loss": 1.0611,
      "step": 500
    },
    {
      "epoch": 0.5307409353652128,
      "grad_norm": 0.8238719502721198,
      "learning_rate": 1.601885465497404e-06,
      "loss": 1.0651,
      "step": 505
    },
    {
      "epoch": 0.535995796111403,
      "grad_norm": 0.8290145975340624,
      "learning_rate": 1.5743756320098334e-06,
      "loss": 1.0643,
      "step": 510
    },
    {
      "epoch": 0.5412506568575933,
      "grad_norm": 0.814187446370962,
      "learning_rate": 1.5468406955019059e-06,
      "loss": 1.0571,
      "step": 515
    },
    {
      "epoch": 0.5465055176037835,
      "grad_norm": 0.8640835720468265,
      "learning_rate": 1.5192899494754443e-06,
      "loss": 1.0731,
      "step": 520
    },
    {
      "epoch": 0.5517603783499737,
      "grad_norm": 0.8096973874507514,
      "learning_rate": 1.4917326927682494e-06,
      "loss": 1.0705,
      "step": 525
    },
    {
      "epoch": 0.5570152390961639,
      "grad_norm": 0.8060488155757439,
      "learning_rate": 1.4641782264155852e-06,
      "loss": 1.0738,
      "step": 530
    },
    {
      "epoch": 0.5622700998423542,
      "grad_norm": 0.8272465004554679,
      "learning_rate": 1.4366358505109237e-06,
      "loss": 1.0857,
      "step": 535
    },
    {
      "epoch": 0.5675249605885444,
      "grad_norm": 0.808405985194556,
      "learning_rate": 1.4091148610670098e-06,
      "loss": 1.0699,
      "step": 540
    },
    {
      "epoch": 0.5727798213347346,
      "grad_norm": 0.8401268224107731,
      "learning_rate": 1.3816245468782988e-06,
      "loss": 1.0601,
      "step": 545
    },
    {
      "epoch": 0.5780346820809249,
      "grad_norm": 0.8212169210412936,
      "learning_rate": 1.3541741863858352e-06,
      "loss": 1.0534,
      "step": 550
    },
    {
      "epoch": 0.5832895428271151,
      "grad_norm": 0.909077361069008,
      "learning_rate": 1.326773044545621e-06,
      "loss": 1.0672,
      "step": 555
    },
    {
      "epoch": 0.5885444035733053,
      "grad_norm": 0.8144193123335469,
      "learning_rate": 1.299430369701541e-06,
      "loss": 1.058,
      "step": 560
    },
    {
      "epoch": 0.5937992643194955,
      "grad_norm": 0.8024091331374241,
      "learning_rate": 1.272155390463889e-06,
      "loss": 1.084,
      "step": 565
    },
    {
      "epoch": 0.5990541250656858,
      "grad_norm": 0.8132457228764496,
      "learning_rate": 1.2449573125945607e-06,
      "loss": 1.0677,
      "step": 570
    },
    {
      "epoch": 0.604308985811876,
      "grad_norm": 0.8142277094157844,
      "learning_rate": 1.2178453158999509e-06,
      "loss": 1.0706,
      "step": 575
    },
    {
      "epoch": 0.6095638465580662,
      "grad_norm": 0.7999185759745931,
      "learning_rate": 1.1908285511326195e-06,
      "loss": 1.056,
      "step": 580
    },
    {
      "epoch": 0.6148187073042565,
      "grad_norm": 0.8042827578821733,
      "learning_rate": 1.1639161369027564e-06,
      "loss": 1.0544,
      "step": 585
    },
    {
      "epoch": 0.6200735680504467,
      "grad_norm": 0.7974533954875164,
      "learning_rate": 1.1371171566004986e-06,
      "loss": 1.0508,
      "step": 590
    },
    {
      "epoch": 0.6253284287966369,
      "grad_norm": 0.7901099674485127,
      "learning_rate": 1.1104406553301357e-06,
      "loss": 1.0636,
      "step": 595
    },
    {
      "epoch": 0.6305832895428272,
      "grad_norm": 0.8076939152475928,
      "learning_rate": 1.0838956368572335e-06,
      "loss": 1.0526,
      "step": 600
    },
    {
      "epoch": 0.6358381502890174,
      "grad_norm": 0.799554008565015,
      "learning_rate": 1.0574910605697135e-06,
      "loss": 1.0715,
      "step": 605
    },
    {
      "epoch": 0.6410930110352075,
      "grad_norm": 0.7979721444226595,
      "learning_rate": 1.03123583845391e-06,
      "loss": 1.0692,
      "step": 610
    },
    {
      "epoch": 0.6463478717813977,
      "grad_norm": 0.8069555984252168,
      "learning_rate": 1.0051388320866258e-06,
      "loss": 1.0583,
      "step": 615
    },
    {
      "epoch": 0.651602732527588,
      "grad_norm": 0.8547163693801487,
      "learning_rate": 9.792088496441992e-07,
      "loss": 1.082,
      "step": 620
    },
    {
      "epoch": 0.6568575932737782,
      "grad_norm": 0.7889726381679567,
      "learning_rate": 9.53454642929601e-07,
      "loss": 1.0772,
      "step": 625
    },
    {
      "epoch": 0.6621124540199684,
      "grad_norm": 0.8667983764401236,
      "learning_rate": 9.278849044185509e-07,
      "loss": 1.0636,
      "step": 630
    },
    {
      "epoch": 0.6673673147661587,
      "grad_norm": 0.8030729729500611,
      "learning_rate": 9.025082643256647e-07,
      "loss": 1.0431,
      "step": 635
    },
    {
      "epoch": 0.6726221755123489,
      "grad_norm": 0.9156958348117665,
      "learning_rate": 8.77333287691609e-07,
      "loss": 1.0585,
      "step": 640
    },
    {
      "epoch": 0.6778770362585391,
      "grad_norm": 0.8108123796700908,
      "learning_rate": 8.523684714922608e-07,
      "loss": 1.0742,
      "step": 645
    },
    {
      "epoch": 0.6831318970047294,
      "grad_norm": 0.8023252184856209,
      "learning_rate": 8.276222417708309e-07,
      "loss": 1.0557,
      "step": 650
    },
    {
      "epoch": 0.6883867577509196,
      "grad_norm": 0.8088733267219154,
      "learning_rate": 8.031029507939401e-07,
      "loss": 1.0549,
      "step": 655
    },
    {
      "epoch": 0.6936416184971098,
      "grad_norm": 0.7972317042691273,
      "learning_rate": 7.788188742325803e-07,
      "loss": 1.0615,
      "step": 660
    },
    {
      "epoch": 0.6988964792433,
      "grad_norm": 0.8305529275791657,
      "learning_rate": 7.547782083689479e-07,
      "loss": 1.0643,
      "step": 665
    },
    {
      "epoch": 0.7041513399894903,
      "grad_norm": 0.8071361500941268,
      "learning_rate": 7.309890673300506e-07,
      "loss": 1.0452,
      "step": 670
    },
    {
      "epoch": 0.7094062007356805,
      "grad_norm": 0.8100694184976672,
      "learning_rate": 7.074594803490618e-07,
      "loss": 1.0517,
      "step": 675
    },
    {
      "epoch": 0.7146610614818707,
      "grad_norm": 0.8505474154250443,
      "learning_rate": 6.841973890553168e-07,
      "loss": 1.0612,
      "step": 680
    },
    {
      "epoch": 0.719915922228061,
      "grad_norm": 0.8165179858505918,
      "learning_rate": 6.6121064479388e-07,
      "loss": 1.0365,
      "step": 685
    },
    {
      "epoch": 0.7251707829742512,
      "grad_norm": 0.8100990471560885,
      "learning_rate": 6.385070059755846e-07,
      "loss": 1.0739,
      "step": 690
    },
    {
      "epoch": 0.7304256437204414,
      "grad_norm": 0.8184474658493306,
      "learning_rate": 6.160941354584404e-07,
      "loss": 1.0611,
      "step": 695
    },
    {
      "epoch": 0.7356805044666316,
      "grad_norm": 0.8252657802847051,
      "learning_rate": 5.93979597961289e-07,
      "loss": 1.0505,
      "step": 700
    },
    {
      "epoch": 0.7409353652128219,
      "grad_norm": 0.7762171975020372,
      "learning_rate": 5.721708575105861e-07,
      "loss": 1.0589,
      "step": 705
    },
    {
      "epoch": 0.7461902259590121,
      "grad_norm": 0.8192139022265624,
      "learning_rate": 5.506752749211673e-07,
      "loss": 1.0546,
      "step": 710
    },
    {
      "epoch": 0.7514450867052023,
      "grad_norm": 0.8083286951981082,
      "learning_rate": 5.295001053118499e-07,
      "loss": 1.0564,
      "step": 715
    },
    {
      "epoch": 0.7566999474513926,
      "grad_norm": 0.792026256039253,
      "learning_rate": 5.086524956567084e-07,
      "loss": 1.0714,
      "step": 720
    },
    {
      "epoch": 0.7619548081975828,
      "grad_norm": 0.7949670797179523,
      "learning_rate": 4.88139482372852e-07,
      "loss": 1.0375,
      "step": 725
    },
    {
      "epoch": 0.767209668943773,
      "grad_norm": 0.9772289752161802,
      "learning_rate": 4.679679889455153e-07,
      "loss": 1.0526,
      "step": 730
    },
    {
      "epoch": 0.7724645296899633,
      "grad_norm": 0.8027717879760866,
      "learning_rate": 4.4814482359126713e-07,
      "loss": 1.0819,
      "step": 735
    },
    {
      "epoch": 0.7777193904361535,
      "grad_norm": 0.8018900351097608,
      "learning_rate": 4.2867667696012255e-07,
      "loss": 1.0627,
      "step": 740
    },
    {
      "epoch": 0.7829742511823437,
      "grad_norm": 1.2040107178094928,
      "learning_rate": 4.0957011987733655e-07,
      "loss": 1.0759,
      "step": 745
    },
    {
      "epoch": 0.7882291119285338,
      "grad_norm": 0.8007696827925082,
      "learning_rate": 3.908316011256419e-07,
      "loss": 1.0744,
      "step": 750
    },
    {
      "epoch": 0.7934839726747241,
      "grad_norm": 0.7873570653528454,
      "learning_rate": 3.7246744526867525e-07,
      "loss": 1.0568,
      "step": 755
    },
    {
      "epoch": 0.7987388334209143,
      "grad_norm": 0.7923946383936664,
      "learning_rate": 3.5448385051633225e-07,
      "loss": 1.0464,
      "step": 760
    },
    {
      "epoch": 0.8039936941671045,
      "grad_norm": 0.7863315246660161,
      "learning_rate": 3.368868866327678e-07,
      "loss": 1.0612,
      "step": 765
    },
    {
      "epoch": 0.8092485549132948,
      "grad_norm": 1.479921699105945,
      "learning_rate": 3.1968249288774887e-07,
      "loss": 1.0605,
      "step": 770
    },
    {
      "epoch": 0.814503415659485,
      "grad_norm": 0.8008655410629794,
      "learning_rate": 3.0287647605205155e-07,
      "loss": 1.0724,
      "step": 775
    },
    {
      "epoch": 0.8197582764056752,
      "grad_norm": 0.8062826939041355,
      "learning_rate": 2.86474508437579e-07,
      "loss": 1.0579,
      "step": 780
    },
    {
      "epoch": 0.8250131371518655,
      "grad_norm": 0.8029021628515398,
      "learning_rate": 2.704821259828608e-07,
      "loss": 1.0684,
      "step": 785
    },
    {
      "epoch": 0.8302679978980557,
      "grad_norm": 0.8105469057959347,
      "learning_rate": 2.5490472638458195e-07,
      "loss": 1.0485,
      "step": 790
    },
    {
      "epoch": 0.8355228586442459,
      "grad_norm": 0.7927228754247995,
      "learning_rate": 2.3974756727576886e-07,
      "loss": 1.0698,
      "step": 795
    },
    {
      "epoch": 0.8407777193904361,
      "grad_norm": 0.7849174985906052,
      "learning_rate": 2.2501576445125077e-07,
      "loss": 1.0595,
      "step": 800
    },
    {
      "epoch": 0.8460325801366264,
      "grad_norm": 0.8186938863488736,
      "learning_rate": 2.1071429014099365e-07,
      "loss": 1.0631,
      "step": 805
    },
    {
      "epoch": 0.8512874408828166,
      "grad_norm": 0.7864326637111713,
      "learning_rate": 1.9684797133188865e-07,
      "loss": 1.0397,
      "step": 810
    },
    {
      "epoch": 0.8565423016290068,
      "grad_norm": 0.7954693830348463,
      "learning_rate": 1.8342148813856414e-07,
      "loss": 1.0498,
      "step": 815
    },
    {
      "epoch": 0.8617971623751971,
      "grad_norm": 0.8900900173262289,
      "learning_rate": 1.7043937222376766e-07,
      "loss": 1.0484,
      "step": 820
    },
    {
      "epoch": 0.8670520231213873,
      "grad_norm": 0.8002588089694325,
      "learning_rate": 1.579060052688548e-07,
      "loss": 1.0676,
      "step": 825
    },
    {
      "epoch": 0.8723068838675775,
      "grad_norm": 0.7947929611802326,
      "learning_rate": 1.4582561749489847e-07,
      "loss": 1.0658,
      "step": 830
    },
    {
      "epoch": 0.8775617446137677,
      "grad_norm": 0.8288459681225425,
      "learning_rate": 1.3420228623491742e-07,
      "loss": 1.034,
      "step": 835
    },
    {
      "epoch": 0.882816605359958,
      "grad_norm": 0.8346010909012226,
      "learning_rate": 1.2303993455770946e-07,
      "loss": 1.0679,
      "step": 840
    },
    {
      "epoch": 0.8880714661061482,
      "grad_norm": 0.8436829522982748,
      "learning_rate": 1.1234232994374916e-07,
      "loss": 1.0561,
      "step": 845
    },
    {
      "epoch": 0.8933263268523384,
      "grad_norm": 0.8025541491307114,
      "learning_rate": 1.0211308301360039e-07,
      "loss": 1.0635,
      "step": 850
    },
    {
      "epoch": 0.8985811875985287,
      "grad_norm": 0.8305527668792659,
      "learning_rate": 9.235564630927196e-08,
      "loss": 1.0499,
      "step": 855
    },
    {
      "epoch": 0.9038360483447189,
      "grad_norm": 0.798582022149453,
      "learning_rate": 8.307331312892601e-08,
      "loss": 1.0573,
      "step": 860
    },
    {
      "epoch": 0.9090909090909091,
      "grad_norm": 0.7805401930107103,
      "learning_rate": 7.426921641533562e-08,
      "loss": 1.0536,
      "step": 865
    },
    {
      "epoch": 0.9143457698370994,
      "grad_norm": 0.7779984836642342,
      "learning_rate": 6.594632769846354e-08,
      "loss": 1.0605,
      "step": 870
    },
    {
      "epoch": 0.9196006305832896,
      "grad_norm": 0.7849919719549066,
      "learning_rate": 5.810745609252166e-08,
      "loss": 1.048,
      "step": 875
    },
    {
      "epoch": 0.9248554913294798,
      "grad_norm": 0.7974346377505517,
      "learning_rate": 5.0755247347847814e-08,
      "loss": 1.0503,
      "step": 880
    },
    {
      "epoch": 0.9301103520756699,
      "grad_norm": 0.8239903131727321,
      "learning_rate": 4.389218295792002e-08,
      "loss": 1.065,
      "step": 885
    },
    {
      "epoch": 0.9353652128218602,
      "grad_norm": 0.782976096757697,
      "learning_rate": 3.7520579321812186e-08,
      "loss": 1.0731,
      "step": 890
    },
    {
      "epoch": 0.9406200735680504,
      "grad_norm": 0.7800809248031729,
      "learning_rate": 3.1642586962369765e-08,
      "loss": 1.0363,
      "step": 895
    },
    {
      "epoch": 0.9458749343142406,
      "grad_norm": 0.7848631462241675,
      "learning_rate": 2.6260189800372757e-08,
      "loss": 1.0517,
      "step": 900
    },
    {
      "epoch": 0.9511297950604309,
      "grad_norm": 0.7779665883276563,
      "learning_rate": 2.13752044849288e-08,
      "loss": 1.0626,
      "step": 905
    },
    {
      "epoch": 0.9563846558066211,
      "grad_norm": 0.7949647408081179,
      "learning_rate": 1.698927978032383e-08,
      "loss": 1.0485,
      "step": 910
    },
    {
      "epoch": 0.9616395165528113,
      "grad_norm": 0.7920892473556865,
      "learning_rate": 1.3103896009537208e-08,
      "loss": 1.0476,
      "step": 915
    },
    {
      "epoch": 0.9668943772990016,
      "grad_norm": 0.7894649006593766,
      "learning_rate": 9.720364554606898e-09,
      "loss": 1.0554,
      "step": 920
    },
    {
      "epoch": 0.9721492380451918,
      "grad_norm": 0.7926655176731906,
      "learning_rate": 6.839827414016675e-09,
      "loss": 1.0636,
      "step": 925
    },
    {
      "epoch": 0.977404098791382,
      "grad_norm": 0.7738940217826289,
      "learning_rate": 4.463256817252792e-09,
      "loss": 1.0626,
      "step": 930
    },
    {
      "epoch": 0.9826589595375722,
      "grad_norm": 0.7796303093908586,
      "learning_rate": 2.5914548966596285e-09,
      "loss": 1.0583,
      "step": 935
    },
    {
      "epoch": 0.9879138202837625,
      "grad_norm": 0.7880712824013714,
      "learning_rate": 1.2250534167067561e-09,
      "loss": 1.0563,
      "step": 940
    },
    {
      "epoch": 0.9931686810299527,
      "grad_norm": 0.7995046619036974,
      "learning_rate": 3.6451356075817287e-10,
      "loss": 1.0511,
      "step": 945
    },
    {
      "epoch": 0.9984235417761429,
      "grad_norm": 0.7975204845224678,
      "learning_rate": 1.0125775414981941e-11,
      "loss": 1.0962,
      "step": 950
    },
    {
      "epoch": 0.999474513925381,
      "eval_loss": 1.0638784170150757,
      "eval_runtime": 594.284,
      "eval_samples_per_second": 22.674,
      "eval_steps_per_second": 0.71,
      "step": 951
    },
    {
      "epoch": 0.999474513925381,
      "step": 951,
      "total_flos": 905758069751808.0,
      "train_loss": 1.102385032064155,
      "train_runtime": 21864.5552,
      "train_samples_per_second": 5.57,
      "train_steps_per_second": 0.043
    }
  ],
  "logging_steps": 5,
  "max_steps": 951,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 100,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 905758069751808.0,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}