{
"best_global_step": 166236,
"best_metric": 0.7059909142532652,
"best_model_checkpoint": "modernbert-heritage-classification/checkpoint-166236",
"epoch": 3.0,
"eval_steps": 500,
"global_step": 249354,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.0060155441661252675,
"grad_norm": 3.192460536956787,
"learning_rate": 1.996005678673693e-05,
"loss": 0.4511,
"step": 500
},
{
"epoch": 0.012031088332250535,
"grad_norm": 2.3000173568725586,
"learning_rate": 1.992003336621831e-05,
"loss": 0.4198,
"step": 1000
},
{
"epoch": 0.018046632498375802,
"grad_norm": 6.538833141326904,
"learning_rate": 1.9879929738444142e-05,
"loss": 0.4086,
"step": 1500
},
{
"epoch": 0.02406217666450107,
"grad_norm": 0.18448229134082794,
"learning_rate": 1.9839826110669975e-05,
"loss": 0.3574,
"step": 2000
},
{
"epoch": 0.030077720830626337,
"grad_norm": 5.782385349273682,
"learning_rate": 1.9799722482895804e-05,
"loss": 0.3798,
"step": 2500
},
{
"epoch": 0.036093264996751605,
"grad_norm": 2.290853500366211,
"learning_rate": 1.9759699062377184e-05,
"loss": 0.38,
"step": 3000
},
{
"epoch": 0.042108809162876876,
"grad_norm": 4.156062126159668,
"learning_rate": 1.9719595434603017e-05,
"loss": 0.3838,
"step": 3500
},
{
"epoch": 0.04812435332900214,
"grad_norm": 2.1094439029693604,
"learning_rate": 1.967949180682885e-05,
"loss": 0.3625,
"step": 4000
},
{
"epoch": 0.05413989749512741,
"grad_norm": 8.78498363494873,
"learning_rate": 1.963938817905468e-05,
"loss": 0.3834,
"step": 4500
},
{
"epoch": 0.060155441661252675,
"grad_norm": 0.003325940575450659,
"learning_rate": 1.959928455128051e-05,
"loss": 0.3395,
"step": 5000
},
{
"epoch": 0.06617098582737795,
"grad_norm": 0.030467336997389793,
"learning_rate": 1.9559180923506344e-05,
"loss": 0.3605,
"step": 5500
},
{
"epoch": 0.07218652999350321,
"grad_norm": 0.7140659689903259,
"learning_rate": 1.9519077295732173e-05,
"loss": 0.352,
"step": 6000
},
{
"epoch": 0.07820207415962847,
"grad_norm": 0.013019168749451637,
"learning_rate": 1.9479053875213553e-05,
"loss": 0.3533,
"step": 6500
},
{
"epoch": 0.08421761832575375,
"grad_norm": 3.8174266815185547,
"learning_rate": 1.9438950247439386e-05,
"loss": 0.316,
"step": 7000
},
{
"epoch": 0.09023316249187902,
"grad_norm": 7.174667835235596,
"learning_rate": 1.939884661966522e-05,
"loss": 0.3495,
"step": 7500
},
{
"epoch": 0.09624870665800428,
"grad_norm": 1.6871285438537598,
"learning_rate": 1.9358742991891048e-05,
"loss": 0.3449,
"step": 8000
},
{
"epoch": 0.10226425082412956,
"grad_norm": 0.04142903909087181,
"learning_rate": 1.931863936411688e-05,
"loss": 0.3212,
"step": 8500
},
{
"epoch": 0.10827979499025482,
"grad_norm": 0.0040322355926036835,
"learning_rate": 1.9278535736342713e-05,
"loss": 0.3416,
"step": 9000
},
{
"epoch": 0.11429533915638009,
"grad_norm": 7.649967193603516,
"learning_rate": 1.9238512315824093e-05,
"loss": 0.3615,
"step": 9500
},
{
"epoch": 0.12031088332250535,
"grad_norm": 2.4331884384155273,
"learning_rate": 1.9198408688049922e-05,
"loss": 0.354,
"step": 10000
},
{
"epoch": 0.12632642748863063,
"grad_norm": 8.657504081726074,
"learning_rate": 1.9158305060275755e-05,
"loss": 0.3413,
"step": 10500
},
{
"epoch": 0.1323419716547559,
"grad_norm": 8.941267013549805,
"learning_rate": 1.9118201432501588e-05,
"loss": 0.3211,
"step": 11000
},
{
"epoch": 0.13835751582088116,
"grad_norm": 16.12137794494629,
"learning_rate": 1.9078178011982968e-05,
"loss": 0.3323,
"step": 11500
},
{
"epoch": 0.14437305998700642,
"grad_norm": 0.30122852325439453,
"learning_rate": 1.9038074384208797e-05,
"loss": 0.3163,
"step": 12000
},
{
"epoch": 0.15038860415313168,
"grad_norm": 0.2980821132659912,
"learning_rate": 1.899797075643463e-05,
"loss": 0.3264,
"step": 12500
},
{
"epoch": 0.15640414831925695,
"grad_norm": 0.006154663860797882,
"learning_rate": 1.8957867128660462e-05,
"loss": 0.3453,
"step": 13000
},
{
"epoch": 0.16241969248538224,
"grad_norm": 0.8030881285667419,
"learning_rate": 1.8917763500886292e-05,
"loss": 0.3152,
"step": 13500
},
{
"epoch": 0.1684352366515075,
"grad_norm": 17.813459396362305,
"learning_rate": 1.8877659873112124e-05,
"loss": 0.3189,
"step": 14000
},
{
"epoch": 0.17445078081763277,
"grad_norm": 1.1400656700134277,
"learning_rate": 1.8837556245337957e-05,
"loss": 0.3434,
"step": 14500
},
{
"epoch": 0.18046632498375803,
"grad_norm": 0.09848301857709885,
"learning_rate": 1.8797452617563786e-05,
"loss": 0.3087,
"step": 15000
},
{
"epoch": 0.1864818691498833,
"grad_norm": 0.04860129952430725,
"learning_rate": 1.875734898978962e-05,
"loss": 0.3289,
"step": 15500
},
{
"epoch": 0.19249741331600856,
"grad_norm": 0.012527902610599995,
"learning_rate": 1.8717325569271e-05,
"loss": 0.3324,
"step": 16000
},
{
"epoch": 0.19851295748213382,
"grad_norm": 5.626002788543701,
"learning_rate": 1.867730214875238e-05,
"loss": 0.3351,
"step": 16500
},
{
"epoch": 0.20452850164825911,
"grad_norm": 0.2671683430671692,
"learning_rate": 1.8637198520978212e-05,
"loss": 0.3467,
"step": 17000
},
{
"epoch": 0.21054404581438438,
"grad_norm": 0.4044207036495209,
"learning_rate": 1.859709489320404e-05,
"loss": 0.3248,
"step": 17500
},
{
"epoch": 0.21655958998050964,
"grad_norm": 0.19454579055309296,
"learning_rate": 1.8556991265429874e-05,
"loss": 0.339,
"step": 18000
},
{
"epoch": 0.2225751341466349,
"grad_norm": 13.63963508605957,
"learning_rate": 1.8516887637655703e-05,
"loss": 0.3252,
"step": 18500
},
{
"epoch": 0.22859067831276017,
"grad_norm": 3.8693294525146484,
"learning_rate": 1.8476784009881536e-05,
"loss": 0.3311,
"step": 19000
},
{
"epoch": 0.23460622247888543,
"grad_norm": 2.644678831100464,
"learning_rate": 1.8436760589362916e-05,
"loss": 0.3378,
"step": 19500
},
{
"epoch": 0.2406217666450107,
"grad_norm": 1.1504566669464111,
"learning_rate": 1.8396656961588745e-05,
"loss": 0.3162,
"step": 20000
},
{
"epoch": 0.24663731081113596,
"grad_norm": 0.33002209663391113,
"learning_rate": 1.8356553333814578e-05,
"loss": 0.3112,
"step": 20500
},
{
"epoch": 0.25265285497726125,
"grad_norm": 5.884070873260498,
"learning_rate": 1.831644970604041e-05,
"loss": 0.3375,
"step": 21000
},
{
"epoch": 0.2586683991433865,
"grad_norm": 0.004158430732786655,
"learning_rate": 1.827634607826624e-05,
"loss": 0.2711,
"step": 21500
},
{
"epoch": 0.2646839433095118,
"grad_norm": 6.601622581481934,
"learning_rate": 1.8236242450492072e-05,
"loss": 0.3031,
"step": 22000
},
{
"epoch": 0.27069948747563705,
"grad_norm": 4.164157390594482,
"learning_rate": 1.8196138822717905e-05,
"loss": 0.3228,
"step": 22500
},
{
"epoch": 0.2767150316417623,
"grad_norm": 34.43812942504883,
"learning_rate": 1.8156035194943734e-05,
"loss": 0.3061,
"step": 23000
},
{
"epoch": 0.2827305758078876,
"grad_norm": 0.02143733762204647,
"learning_rate": 1.8115931567169567e-05,
"loss": 0.3163,
"step": 23500
},
{
"epoch": 0.28874611997401284,
"grad_norm": 9.730279922485352,
"learning_rate": 1.8075908146650947e-05,
"loss": 0.2817,
"step": 24000
},
{
"epoch": 0.2947616641401381,
"grad_norm": 9.340431213378906,
"learning_rate": 1.803580451887678e-05,
"loss": 0.3105,
"step": 24500
},
{
"epoch": 0.30077720830626337,
"grad_norm": 9.22760009765625,
"learning_rate": 1.799570089110261e-05,
"loss": 0.309,
"step": 25000
},
{
"epoch": 0.30679275247238863,
"grad_norm": 3.7310335636138916,
"learning_rate": 1.795567747058399e-05,
"loss": 0.3075,
"step": 25500
},
{
"epoch": 0.3128082966385139,
"grad_norm": 4.968273639678955,
"learning_rate": 1.7915573842809822e-05,
"loss": 0.3056,
"step": 26000
},
{
"epoch": 0.3188238408046392,
"grad_norm": 15.309103012084961,
"learning_rate": 1.7875470215035654e-05,
"loss": 0.3063,
"step": 26500
},
{
"epoch": 0.3248393849707645,
"grad_norm": 0.028000958263874054,
"learning_rate": 1.7835366587261484e-05,
"loss": 0.3251,
"step": 27000
},
{
"epoch": 0.33085492913688974,
"grad_norm": 11.808887481689453,
"learning_rate": 1.7795262959487316e-05,
"loss": 0.3335,
"step": 27500
},
{
"epoch": 0.336870473303015,
"grad_norm": 6.1232123374938965,
"learning_rate": 1.775515933171315e-05,
"loss": 0.2928,
"step": 28000
},
{
"epoch": 0.34288601746914027,
"grad_norm": 2.071223258972168,
"learning_rate": 1.771505570393898e-05,
"loss": 0.3228,
"step": 28500
},
{
"epoch": 0.34890156163526553,
"grad_norm": 6.20293664932251,
"learning_rate": 1.767495207616481e-05,
"loss": 0.3115,
"step": 29000
},
{
"epoch": 0.3549171058013908,
"grad_norm": 0.5319348573684692,
"learning_rate": 1.7634848448390644e-05,
"loss": 0.3424,
"step": 29500
},
{
"epoch": 0.36093264996751606,
"grad_norm": 0.15102390944957733,
"learning_rate": 1.7594825027872024e-05,
"loss": 0.3198,
"step": 30000
},
{
"epoch": 0.3669481941336413,
"grad_norm": 10.45068359375,
"learning_rate": 1.7554721400097853e-05,
"loss": 0.3083,
"step": 30500
},
{
"epoch": 0.3729637382997666,
"grad_norm": 0.3780848979949951,
"learning_rate": 1.7514697979579233e-05,
"loss": 0.3079,
"step": 31000
},
{
"epoch": 0.37897928246589185,
"grad_norm": 0.02213098295032978,
"learning_rate": 1.7474674559060613e-05,
"loss": 0.301,
"step": 31500
},
{
"epoch": 0.3849948266320171,
"grad_norm": 2.598281145095825,
"learning_rate": 1.7434570931286446e-05,
"loss": 0.2878,
"step": 32000
},
{
"epoch": 0.3910103707981424,
"grad_norm": 0.14519445598125458,
"learning_rate": 1.739446730351228e-05,
"loss": 0.3231,
"step": 32500
},
{
"epoch": 0.39702591496426765,
"grad_norm": 0.04602254554629326,
"learning_rate": 1.7354363675738108e-05,
"loss": 0.2972,
"step": 33000
},
{
"epoch": 0.4030414591303929,
"grad_norm": 18.00914192199707,
"learning_rate": 1.731426004796394e-05,
"loss": 0.323,
"step": 33500
},
{
"epoch": 0.40905700329651823,
"grad_norm": 6.137303829193115,
"learning_rate": 1.7274156420189773e-05,
"loss": 0.3179,
"step": 34000
},
{
"epoch": 0.4150725474626435,
"grad_norm": 3.145047187805176,
"learning_rate": 1.7234052792415602e-05,
"loss": 0.306,
"step": 34500
},
{
"epoch": 0.42108809162876876,
"grad_norm": 4.087808609008789,
"learning_rate": 1.7193949164641435e-05,
"loss": 0.325,
"step": 35000
},
{
"epoch": 0.427103635794894,
"grad_norm": 0.009332289919257164,
"learning_rate": 1.7153845536867268e-05,
"loss": 0.2979,
"step": 35500
},
{
"epoch": 0.4331191799610193,
"grad_norm": 9.885307312011719,
"learning_rate": 1.7113741909093097e-05,
"loss": 0.3247,
"step": 36000
},
{
"epoch": 0.43913472412714455,
"grad_norm": 0.0056028529070317745,
"learning_rate": 1.707363828131893e-05,
"loss": 0.2949,
"step": 36500
},
{
"epoch": 0.4451502682932698,
"grad_norm": 8.444794654846191,
"learning_rate": 1.7033534653544762e-05,
"loss": 0.306,
"step": 37000
},
{
"epoch": 0.4511658124593951,
"grad_norm": 2.0681827068328857,
"learning_rate": 1.699343102577059e-05,
"loss": 0.2792,
"step": 37500
},
{
"epoch": 0.45718135662552034,
"grad_norm": 0.017593583092093468,
"learning_rate": 1.695340760525197e-05,
"loss": 0.3225,
"step": 38000
},
{
"epoch": 0.4631969007916456,
"grad_norm": 3.3854148387908936,
"learning_rate": 1.6913303977477804e-05,
"loss": 0.2962,
"step": 38500
},
{
"epoch": 0.46921244495777087,
"grad_norm": 0.4375062584877014,
"learning_rate": 1.6873200349703637e-05,
"loss": 0.3146,
"step": 39000
},
{
"epoch": 0.47522798912389613,
"grad_norm": 0.24493420124053955,
"learning_rate": 1.6833096721929466e-05,
"loss": 0.2745,
"step": 39500
},
{
"epoch": 0.4812435332900214,
"grad_norm": 5.30033540725708,
"learning_rate": 1.6793153508666397e-05,
"loss": 0.3242,
"step": 40000
},
{
"epoch": 0.48725907745614666,
"grad_norm": 0.08991783857345581,
"learning_rate": 1.6753049880892226e-05,
"loss": 0.2889,
"step": 40500
},
{
"epoch": 0.4932746216222719,
"grad_norm": 0.16233447194099426,
"learning_rate": 1.671294625311806e-05,
"loss": 0.3147,
"step": 41000
},
{
"epoch": 0.49929016578839724,
"grad_norm": 21.49981117248535,
"learning_rate": 1.6672842625343892e-05,
"loss": 0.3097,
"step": 41500
},
{
"epoch": 0.5053057099545225,
"grad_norm": 0.07683174312114716,
"learning_rate": 1.663281920482527e-05,
"loss": 0.3283,
"step": 42000
},
{
"epoch": 0.5113212541206478,
"grad_norm": 5.969061374664307,
"learning_rate": 1.65927155770511e-05,
"loss": 0.2973,
"step": 42500
},
{
"epoch": 0.517336798286773,
"grad_norm": 0.1165817528963089,
"learning_rate": 1.6552611949276934e-05,
"loss": 0.2865,
"step": 43000
},
{
"epoch": 0.5233523424528983,
"grad_norm": 4.852444648742676,
"learning_rate": 1.6512508321502766e-05,
"loss": 0.2995,
"step": 43500
},
{
"epoch": 0.5293678866190236,
"grad_norm": 0.3422677218914032,
"learning_rate": 1.6472404693728596e-05,
"loss": 0.3078,
"step": 44000
},
{
"epoch": 0.5353834307851488,
"grad_norm": 0.02138070948421955,
"learning_rate": 1.643230106595443e-05,
"loss": 0.3041,
"step": 44500
},
{
"epoch": 0.5413989749512741,
"grad_norm": 19.726158142089844,
"learning_rate": 1.639219743818026e-05,
"loss": 0.2747,
"step": 45000
},
{
"epoch": 0.5474145191173994,
"grad_norm": 0.8494102358818054,
"learning_rate": 1.635209381040609e-05,
"loss": 0.3165,
"step": 45500
},
{
"epoch": 0.5534300632835246,
"grad_norm": 69.28723907470703,
"learning_rate": 1.6311990182631923e-05,
"loss": 0.286,
"step": 46000
},
{
"epoch": 0.5594456074496499,
"grad_norm": 5.112706661224365,
"learning_rate": 1.6271886554857756e-05,
"loss": 0.3103,
"step": 46500
},
{
"epoch": 0.5654611516157751,
"grad_norm": 0.0373302698135376,
"learning_rate": 1.6231782927083585e-05,
"loss": 0.2966,
"step": 47000
},
{
"epoch": 0.5714766957819004,
"grad_norm": 0.3029526472091675,
"learning_rate": 1.6191679299309418e-05,
"loss": 0.2785,
"step": 47500
},
{
"epoch": 0.5774922399480257,
"grad_norm": 0.18373580276966095,
"learning_rate": 1.6151655878790798e-05,
"loss": 0.3328,
"step": 48000
},
{
"epoch": 0.5835077841141509,
"grad_norm": 0.8737627267837524,
"learning_rate": 1.611155225101663e-05,
"loss": 0.2909,
"step": 48500
},
{
"epoch": 0.5895233282802762,
"grad_norm": 2.7465710639953613,
"learning_rate": 1.607144862324246e-05,
"loss": 0.2991,
"step": 49000
},
{
"epoch": 0.5955388724464015,
"grad_norm": 19.38477325439453,
"learning_rate": 1.6031344995468292e-05,
"loss": 0.2785,
"step": 49500
},
{
"epoch": 0.6015544166125267,
"grad_norm": 0.013609528541564941,
"learning_rate": 1.5991241367694125e-05,
"loss": 0.3198,
"step": 50000
},
{
"epoch": 0.607569960778652,
"grad_norm": 6.444379806518555,
"learning_rate": 1.5951137739919954e-05,
"loss": 0.2816,
"step": 50500
},
{
"epoch": 0.6135855049447773,
"grad_norm": 6.260073661804199,
"learning_rate": 1.5911114319401334e-05,
"loss": 0.3193,
"step": 51000
},
{
"epoch": 0.6196010491109025,
"grad_norm": 8.19072151184082,
"learning_rate": 1.5871010691627167e-05,
"loss": 0.3021,
"step": 51500
},
{
"epoch": 0.6256165932770278,
"grad_norm": 5.27684211730957,
"learning_rate": 1.5830907063853e-05,
"loss": 0.2925,
"step": 52000
},
{
"epoch": 0.631632137443153,
"grad_norm": 0.031686898320913315,
"learning_rate": 1.579080343607883e-05,
"loss": 0.3063,
"step": 52500
},
{
"epoch": 0.6376476816092784,
"grad_norm": 0.15889908373355865,
"learning_rate": 1.575078001556021e-05,
"loss": 0.2893,
"step": 53000
},
{
"epoch": 0.6436632257754037,
"grad_norm": 5.396053314208984,
"learning_rate": 1.571067638778604e-05,
"loss": 0.3009,
"step": 53500
},
{
"epoch": 0.649678769941529,
"grad_norm": 0.15985211730003357,
"learning_rate": 1.5670572760011874e-05,
"loss": 0.2985,
"step": 54000
},
{
"epoch": 0.6556943141076542,
"grad_norm": 2.195845127105713,
"learning_rate": 1.5630469132237704e-05,
"loss": 0.2886,
"step": 54500
},
{
"epoch": 0.6617098582737795,
"grad_norm": 0.000236680411035195,
"learning_rate": 1.5590365504463536e-05,
"loss": 0.3041,
"step": 55000
},
{
"epoch": 0.6677254024399047,
"grad_norm": 0.012887202203273773,
"learning_rate": 1.555026187668937e-05,
"loss": 0.2848,
"step": 55500
},
{
"epoch": 0.67374094660603,
"grad_norm": 0.025558780878782272,
"learning_rate": 1.5510158248915198e-05,
"loss": 0.3037,
"step": 56000
},
{
"epoch": 0.6797564907721553,
"grad_norm": 13.303611755371094,
"learning_rate": 1.547013482839658e-05,
"loss": 0.3052,
"step": 56500
},
{
"epoch": 0.6857720349382805,
"grad_norm": 7.728808403015137,
"learning_rate": 1.543003120062241e-05,
"loss": 0.3076,
"step": 57000
},
{
"epoch": 0.6917875791044058,
"grad_norm": 7.381441593170166,
"learning_rate": 1.538992757284824e-05,
"loss": 0.2838,
"step": 57500
},
{
"epoch": 0.6978031232705311,
"grad_norm": 47.043766021728516,
"learning_rate": 1.5349823945074073e-05,
"loss": 0.2894,
"step": 58000
},
{
"epoch": 0.7038186674366563,
"grad_norm": 1.3084138631820679,
"learning_rate": 1.5309720317299902e-05,
"loss": 0.3089,
"step": 58500
},
{
"epoch": 0.7098342116027816,
"grad_norm": 0.28533193469047546,
"learning_rate": 1.5269616689525735e-05,
"loss": 0.3291,
"step": 59000
},
{
"epoch": 0.7158497557689069,
"grad_norm": 0.006308581214398146,
"learning_rate": 1.5229513061751566e-05,
"loss": 0.2842,
"step": 59500
},
{
"epoch": 0.7218652999350321,
"grad_norm": 0.18038663268089294,
"learning_rate": 1.5189409433977399e-05,
"loss": 0.2964,
"step": 60000
},
{
"epoch": 0.7278808441011574,
"grad_norm": 9.797381401062012,
"learning_rate": 1.514930580620323e-05,
"loss": 0.3303,
"step": 60500
},
{
"epoch": 0.7338963882672827,
"grad_norm": 0.27853924036026,
"learning_rate": 1.510920217842906e-05,
"loss": 0.2762,
"step": 61000
},
{
"epoch": 0.7399119324334079,
"grad_norm": 0.13307029008865356,
"learning_rate": 1.506917875791044e-05,
"loss": 0.2798,
"step": 61500
},
{
"epoch": 0.7459274765995332,
"grad_norm": 4.89689826965332,
"learning_rate": 1.5029075130136273e-05,
"loss": 0.3202,
"step": 62000
},
{
"epoch": 0.7519430207656584,
"grad_norm": 14.662346839904785,
"learning_rate": 1.4988971502362104e-05,
"loss": 0.3051,
"step": 62500
},
{
"epoch": 0.7579585649317837,
"grad_norm": 0.15676206350326538,
"learning_rate": 1.4948867874587935e-05,
"loss": 0.3065,
"step": 63000
},
{
"epoch": 0.763974109097909,
"grad_norm": 10.646651268005371,
"learning_rate": 1.4908764246813768e-05,
"loss": 0.2741,
"step": 63500
},
{
"epoch": 0.7699896532640342,
"grad_norm": 7.507605075836182,
"learning_rate": 1.4868740826295148e-05,
"loss": 0.3195,
"step": 64000
},
{
"epoch": 0.7760051974301595,
"grad_norm": 0.24443697929382324,
"learning_rate": 1.4828717405776526e-05,
"loss": 0.2662,
"step": 64500
},
{
"epoch": 0.7820207415962848,
"grad_norm": 7.655556678771973,
"learning_rate": 1.4788613778002359e-05,
"loss": 0.3168,
"step": 65000
},
{
"epoch": 0.78803628576241,
"grad_norm": 7.646127223968506,
"learning_rate": 1.474851015022819e-05,
"loss": 0.2803,
"step": 65500
},
{
"epoch": 0.7940518299285353,
"grad_norm": 0.0017321036430075765,
"learning_rate": 1.4708406522454023e-05,
"loss": 0.2779,
"step": 66000
},
{
"epoch": 0.8000673740946606,
"grad_norm": 0.004854683298617601,
"learning_rate": 1.4668302894679854e-05,
"loss": 0.3048,
"step": 66500
},
{
"epoch": 0.8060829182607858,
"grad_norm": 5.331120491027832,
"learning_rate": 1.4628199266905685e-05,
"loss": 0.3121,
"step": 67000
},
{
"epoch": 0.8120984624269111,
"grad_norm": 4.585981369018555,
"learning_rate": 1.4588095639131517e-05,
"loss": 0.2968,
"step": 67500
},
{
"epoch": 0.8181140065930365,
"grad_norm": 0.0019534584134817123,
"learning_rate": 1.4548072218612896e-05,
"loss": 0.2869,
"step": 68000
},
{
"epoch": 0.8241295507591617,
"grad_norm": 0.03927936032414436,
"learning_rate": 1.4507968590838728e-05,
"loss": 0.2942,
"step": 68500
},
{
"epoch": 0.830145094925287,
"grad_norm": 0.018528884276747704,
"learning_rate": 1.446786496306456e-05,
"loss": 0.2929,
"step": 69000
},
{
"epoch": 0.8361606390914122,
"grad_norm": 2.7401936054229736,
"learning_rate": 1.4427761335290392e-05,
"loss": 0.272,
"step": 69500
},
{
"epoch": 0.8421761832575375,
"grad_norm": 0.07152987271547318,
"learning_rate": 1.4387657707516223e-05,
"loss": 0.3162,
"step": 70000
},
{
"epoch": 0.8481917274236628,
"grad_norm": 5.40221643447876,
"learning_rate": 1.4347634286997603e-05,
"loss": 0.2717,
"step": 70500
},
{
"epoch": 0.854207271589788,
"grad_norm": 2.5662572383880615,
"learning_rate": 1.4307530659223434e-05,
"loss": 0.2913,
"step": 71000
},
{
"epoch": 0.8602228157559133,
"grad_norm": 6.399110794067383,
"learning_rate": 1.4267427031449267e-05,
"loss": 0.279,
"step": 71500
},
{
"epoch": 0.8662383599220386,
"grad_norm": 4.513882160186768,
"learning_rate": 1.4227323403675098e-05,
"loss": 0.3091,
"step": 72000
},
{
"epoch": 0.8722539040881638,
"grad_norm": 2.024488925933838,
"learning_rate": 1.4187219775900929e-05,
"loss": 0.2831,
"step": 72500
},
{
"epoch": 0.8782694482542891,
"grad_norm": 9.901945114135742,
"learning_rate": 1.4147116148126761e-05,
"loss": 0.2863,
"step": 73000
},
{
"epoch": 0.8842849924204144,
"grad_norm": 1.1856168508529663,
"learning_rate": 1.410709272760814e-05,
"loss": 0.2754,
"step": 73500
},
{
"epoch": 0.8903005365865396,
"grad_norm": 4.077794075012207,
"learning_rate": 1.4066989099833972e-05,
"loss": 0.298,
"step": 74000
},
{
"epoch": 0.8963160807526649,
"grad_norm": 0.2611056864261627,
"learning_rate": 1.4026885472059803e-05,
"loss": 0.2985,
"step": 74500
},
{
"epoch": 0.9023316249187902,
"grad_norm": 0.1290031224489212,
"learning_rate": 1.3986781844285636e-05,
"loss": 0.2798,
"step": 75000
},
{
"epoch": 0.9083471690849154,
"grad_norm": 0.1582382321357727,
"learning_rate": 1.3946678216511467e-05,
"loss": 0.2908,
"step": 75500
},
{
"epoch": 0.9143627132510407,
"grad_norm": 6.692793369293213,
"learning_rate": 1.3906574588737298e-05,
"loss": 0.2905,
"step": 76000
},
{
"epoch": 0.920378257417166,
"grad_norm": 12.920578002929688,
"learning_rate": 1.386647096096313e-05,
"loss": 0.306,
"step": 76500
},
{
"epoch": 0.9263938015832912,
"grad_norm": 0.14715702831745148,
"learning_rate": 1.3826367333188961e-05,
"loss": 0.2911,
"step": 77000
},
{
"epoch": 0.9324093457494165,
"grad_norm": 7.250746726989746,
"learning_rate": 1.3786343912670342e-05,
"loss": 0.2911,
"step": 77500
},
{
"epoch": 0.9384248899155417,
"grad_norm": 9.904165267944336,
"learning_rate": 1.3746240284896173e-05,
"loss": 0.2893,
"step": 78000
},
{
"epoch": 0.944440434081667,
"grad_norm": 1.3197027444839478,
"learning_rate": 1.3706136657122005e-05,
"loss": 0.295,
"step": 78500
},
{
"epoch": 0.9504559782477923,
"grad_norm": 0.5488787889480591,
"learning_rate": 1.3666033029347836e-05,
"loss": 0.2617,
"step": 79000
},
{
"epoch": 0.9564715224139175,
"grad_norm": 0.007599683478474617,
"learning_rate": 1.3625929401573667e-05,
"loss": 0.2874,
"step": 79500
},
{
"epoch": 0.9624870665800428,
"grad_norm": 28.689912796020508,
"learning_rate": 1.35858257737995e-05,
"loss": 0.2954,
"step": 80000
},
{
"epoch": 0.9685026107461681,
"grad_norm": 1.1681454181671143,
"learning_rate": 1.354572214602533e-05,
"loss": 0.2893,
"step": 80500
},
{
"epoch": 0.9745181549122933,
"grad_norm": 26.251056671142578,
"learning_rate": 1.3505618518251162e-05,
"loss": 0.2771,
"step": 81000
},
{
"epoch": 0.9805336990784186,
"grad_norm": 19.04422950744629,
"learning_rate": 1.3465514890476994e-05,
"loss": 0.2556,
"step": 81500
},
{
"epoch": 0.9865492432445438,
"grad_norm": 6.7204909324646,
"learning_rate": 1.3425411262702825e-05,
"loss": 0.2999,
"step": 82000
},
{
"epoch": 0.9925647874106691,
"grad_norm": 0.3453606069087982,
"learning_rate": 1.3385307634928656e-05,
"loss": 0.2795,
"step": 82500
},
{
"epoch": 0.9985803315767945,
"grad_norm": 2.7078826427459717,
"learning_rate": 1.3345204007154489e-05,
"loss": 0.2727,
"step": 83000
},
{
"epoch": 1.0,
"eval_accuracy": 0.6738478320152713,
"eval_f1": 0.69947127247092,
"eval_loss": 0.287530779838562,
"eval_roc_auc": 0.8124488682847013,
"eval_runtime": 111.0668,
"eval_samples_per_second": 66.032,
"eval_steps_per_second": 66.032,
"step": 83118
},
{
"epoch": 1.0045958757429196,
"grad_norm": 8.754840850830078,
"learning_rate": 1.3305180586635869e-05,
"loss": 0.2439,
"step": 83500
},
{
"epoch": 1.010611419909045,
"grad_norm": 23.18345069885254,
"learning_rate": 1.3265237373372797e-05,
"loss": 0.2343,
"step": 84000
},
{
"epoch": 1.0166269640751702,
"grad_norm": 0.2795548141002655,
"learning_rate": 1.322513374559863e-05,
"loss": 0.2591,
"step": 84500
},
{
"epoch": 1.0226425082412955,
"grad_norm": 23.055418014526367,
"learning_rate": 1.318503011782446e-05,
"loss": 0.2403,
"step": 85000
},
{
"epoch": 1.0286580524074207,
"grad_norm": 0.0004251549835316837,
"learning_rate": 1.3144926490050291e-05,
"loss": 0.2575,
"step": 85500
},
{
"epoch": 1.034673596573546,
"grad_norm": 0.0705941691994667,
"learning_rate": 1.3104822862276124e-05,
"loss": 0.2298,
"step": 86000
},
{
"epoch": 1.0406891407396712,
"grad_norm": 0.14872965216636658,
"learning_rate": 1.3064719234501955e-05,
"loss": 0.2556,
"step": 86500
},
{
"epoch": 1.0467046849057966,
"grad_norm": 29.70933723449707,
"learning_rate": 1.3024615606727786e-05,
"loss": 0.246,
"step": 87000
},
{
"epoch": 1.0527202290719218,
"grad_norm": 0.12189821898937225,
"learning_rate": 1.2984511978953618e-05,
"loss": 0.2486,
"step": 87500
},
{
"epoch": 1.0587357732380471,
"grad_norm": 16.819997787475586,
"learning_rate": 1.2944488558434999e-05,
"loss": 0.2738,
"step": 88000
},
{
"epoch": 1.0647513174041723,
"grad_norm": 0.504358172416687,
"learning_rate": 1.290438493066083e-05,
"loss": 0.2512,
"step": 88500
},
{
"epoch": 1.0707668615702977,
"grad_norm": 33.048221588134766,
"learning_rate": 1.286428130288666e-05,
"loss": 0.2464,
"step": 89000
},
{
"epoch": 1.0767824057364228,
"grad_norm": 10.764643669128418,
"learning_rate": 1.2824177675112493e-05,
"loss": 0.2633,
"step": 89500
},
{
"epoch": 1.0827979499025482,
"grad_norm": 0.08640070259571075,
"learning_rate": 1.2784074047338324e-05,
"loss": 0.2406,
"step": 90000
},
{
"epoch": 1.0888134940686736,
"grad_norm": 14.491482734680176,
"learning_rate": 1.2744050626819704e-05,
"loss": 0.2914,
"step": 90500
},
{
"epoch": 1.0948290382347987,
"grad_norm": 6.458908557891846,
"learning_rate": 1.2703946999045535e-05,
"loss": 0.2768,
"step": 91000
},
{
"epoch": 1.100844582400924,
"grad_norm": 3.36252498626709,
"learning_rate": 1.2663923578526915e-05,
"loss": 0.2275,
"step": 91500
},
{
"epoch": 1.1068601265670492,
"grad_norm": 7.332001209259033,
"learning_rate": 1.2623819950752746e-05,
"loss": 0.2561,
"step": 92000
},
{
"epoch": 1.1128756707331746,
"grad_norm": 0.10880118608474731,
"learning_rate": 1.2583796530234126e-05,
"loss": 0.26,
"step": 92500
},
{
"epoch": 1.1188912148992998,
"grad_norm": 0.041975826025009155,
"learning_rate": 1.2543692902459959e-05,
"loss": 0.2588,
"step": 93000
},
{
"epoch": 1.1249067590654251,
"grad_norm": 19.071786880493164,
"learning_rate": 1.250358927468579e-05,
"loss": 0.2537,
"step": 93500
},
{
"epoch": 1.1309223032315503,
"grad_norm": 0.5620952248573303,
"learning_rate": 1.2463485646911621e-05,
"loss": 0.2694,
"step": 94000
},
{
"epoch": 1.1369378473976757,
"grad_norm": 0.4685472548007965,
"learning_rate": 1.2423382019137454e-05,
"loss": 0.2255,
"step": 94500
},
{
"epoch": 1.1429533915638008,
"grad_norm": 0.004374380223453045,
"learning_rate": 1.2383278391363285e-05,
"loss": 0.2979,
"step": 95000
},
{
"epoch": 1.1489689357299262,
"grad_norm": 0.21592481434345245,
"learning_rate": 1.2343174763589117e-05,
"loss": 0.2425,
"step": 95500
},
{
"epoch": 1.1549844798960514,
"grad_norm": 38.64509963989258,
"learning_rate": 1.2303071135814946e-05,
"loss": 0.2534,
"step": 96000
},
{
"epoch": 1.1610000240621767,
"grad_norm": 0.1709357500076294,
"learning_rate": 1.2262967508040777e-05,
"loss": 0.2277,
"step": 96500
},
{
"epoch": 1.1670155682283019,
"grad_norm": 0.02226692996919155,
"learning_rate": 1.2222863880266608e-05,
"loss": 0.248,
"step": 97000
},
{
"epoch": 1.1730311123944273,
"grad_norm": 0.11720846593379974,
"learning_rate": 1.2182760252492441e-05,
"loss": 0.2818,
"step": 97500
},
{
"epoch": 1.1790466565605524,
"grad_norm": 0.17743727564811707,
"learning_rate": 1.2142656624718272e-05,
"loss": 0.2625,
"step": 98000
},
{
"epoch": 1.1850622007266778,
"grad_norm": 0.5575046539306641,
"learning_rate": 1.2102633204199652e-05,
"loss": 0.274,
"step": 98500
},
{
"epoch": 1.191077744892803,
"grad_norm": 0.0813954770565033,
"learning_rate": 1.2062529576425483e-05,
"loss": 0.2764,
"step": 99000
},
{
"epoch": 1.1970932890589283,
"grad_norm": 6.563291549682617,
"learning_rate": 1.2022425948651316e-05,
"loss": 0.2426,
"step": 99500
},
{
"epoch": 1.2031088332250535,
"grad_norm": 4.957773126079701e-05,
"learning_rate": 1.1982322320877147e-05,
"loss": 0.2558,
"step": 100000
},
{
"epoch": 1.2091243773911788,
"grad_norm": 0.08407856523990631,
"learning_rate": 1.1942218693102978e-05,
"loss": 0.244,
"step": 100500
},
{
"epoch": 1.215139921557304,
"grad_norm": 0.04238196834921837,
"learning_rate": 1.1902195272584358e-05,
"loss": 0.2635,
"step": 101000
},
{
"epoch": 1.2211554657234294,
"grad_norm": 0.10858649015426636,
"learning_rate": 1.186209164481019e-05,
"loss": 0.2536,
"step": 101500
},
{
"epoch": 1.2271710098895545,
"grad_norm": 0.14419694244861603,
"learning_rate": 1.1821988017036021e-05,
"loss": 0.2803,
"step": 102000
},
{
"epoch": 1.23318655405568,
"grad_norm": 0.1416100561618805,
"learning_rate": 1.1781964596517402e-05,
"loss": 0.2198,
"step": 102500
},
{
"epoch": 1.239202098221805,
"grad_norm": 0.16191110014915466,
"learning_rate": 1.1741860968743232e-05,
"loss": 0.2687,
"step": 103000
},
{
"epoch": 1.2452176423879304,
"grad_norm": 3.2424659729003906,
"learning_rate": 1.1701757340969065e-05,
"loss": 0.2531,
"step": 103500
},
{
"epoch": 1.2512331865540558,
"grad_norm": 0.009034248068928719,
"learning_rate": 1.1661653713194896e-05,
"loss": 0.2144,
"step": 104000
},
{
"epoch": 1.257248730720181,
"grad_norm": 4.68690299987793,
"learning_rate": 1.1621550085420727e-05,
"loss": 0.2536,
"step": 104500
},
{
"epoch": 1.263264274886306,
"grad_norm": 0.04760544002056122,
"learning_rate": 1.158144645764656e-05,
"loss": 0.2394,
"step": 105000
},
{
"epoch": 1.2692798190524315,
"grad_norm": 0.027170976623892784,
"learning_rate": 1.154134282987239e-05,
"loss": 0.2644,
"step": 105500
},
{
"epoch": 1.2752953632185569,
"grad_norm": 29.608776092529297,
"learning_rate": 1.1501239202098222e-05,
"loss": 0.2456,
"step": 106000
},
{
"epoch": 1.281310907384682,
"grad_norm": 3.261889934539795,
"learning_rate": 1.1461135574324054e-05,
"loss": 0.2182,
"step": 106500
},
{
"epoch": 1.2873264515508072,
"grad_norm": 0.0033520832657814026,
"learning_rate": 1.1421031946549885e-05,
"loss": 0.2612,
"step": 107000
},
{
"epoch": 1.2933419957169325,
"grad_norm": 0.0770372673869133,
"learning_rate": 1.1380928318775716e-05,
"loss": 0.2517,
"step": 107500
},
{
"epoch": 1.299357539883058,
"grad_norm": 0.004508438985794783,
"learning_rate": 1.1340904898257096e-05,
"loss": 0.2552,
"step": 108000
},
{
"epoch": 1.305373084049183,
"grad_norm": 0.05816527456045151,
"learning_rate": 1.1300801270482929e-05,
"loss": 0.2577,
"step": 108500
},
{
"epoch": 1.3113886282153084,
"grad_norm": 8.914427757263184,
"learning_rate": 1.126069764270876e-05,
"loss": 0.2177,
"step": 109000
},
{
"epoch": 1.3174041723814336,
"grad_norm": 0.0006245987024158239,
"learning_rate": 1.1220594014934591e-05,
"loss": 0.2321,
"step": 109500
},
{
"epoch": 1.323419716547559,
"grad_norm": 10.717325210571289,
"learning_rate": 1.1180490387160424e-05,
"loss": 0.2532,
"step": 110000
},
{
"epoch": 1.3294352607136841,
"grad_norm": 3.988328218460083,
"learning_rate": 1.1140386759386255e-05,
"loss": 0.2733,
"step": 110500
},
{
"epoch": 1.3354508048798095,
"grad_norm": 9.221200942993164,
"learning_rate": 1.1100363338867635e-05,
"loss": 0.2611,
"step": 111000
},
{
"epoch": 1.3414663490459346,
"grad_norm": 0.08797607570886612,
"learning_rate": 1.1060259711093466e-05,
"loss": 0.2608,
"step": 111500
},
{
"epoch": 1.34748189321206,
"grad_norm": 0.018298862501978874,
"learning_rate": 1.1020156083319298e-05,
"loss": 0.2579,
"step": 112000
},
{
"epoch": 1.3534974373781852,
"grad_norm": 0.033742956817150116,
"learning_rate": 1.098005245554513e-05,
"loss": 0.2431,
"step": 112500
},
{
"epoch": 1.3595129815443106,
"grad_norm": 0.06899914145469666,
"learning_rate": 1.093994882777096e-05,
"loss": 0.253,
"step": 113000
},
{
"epoch": 1.3655285257104357,
"grad_norm": 0.1469135880470276,
"learning_rate": 1.0899845199996793e-05,
"loss": 0.244,
"step": 113500
},
{
"epoch": 1.371544069876561,
"grad_norm": 5.18328857421875,
"learning_rate": 1.0859821779478173e-05,
"loss": 0.2654,
"step": 114000
},
{
"epoch": 1.3775596140426862,
"grad_norm": 24.231332778930664,
"learning_rate": 1.0819718151704004e-05,
"loss": 0.237,
"step": 114500
},
{
"epoch": 1.3835751582088116,
"grad_norm": 0.2826213240623474,
"learning_rate": 1.0779614523929835e-05,
"loss": 0.2602,
"step": 115000
},
{
"epoch": 1.3895907023749368,
"grad_norm": 0.09144386649131775,
"learning_rate": 1.0739510896155668e-05,
"loss": 0.249,
"step": 115500
},
{
"epoch": 1.3956062465410621,
"grad_norm": 40.423797607421875,
"learning_rate": 1.0699407268381499e-05,
"loss": 0.2621,
"step": 116000
},
{
"epoch": 1.4016217907071873,
"grad_norm": 0.01249714009463787,
"learning_rate": 1.0659383847862879e-05,
"loss": 0.2451,
"step": 116500
},
{
"epoch": 1.4076373348733127,
"grad_norm": 0.14570143818855286,
"learning_rate": 1.061928022008871e-05,
"loss": 0.2481,
"step": 117000
},
{
"epoch": 1.413652879039438,
"grad_norm": 8.342629432678223,
"learning_rate": 1.0579176592314542e-05,
"loss": 0.2813,
"step": 117500
},
{
"epoch": 1.4196684232055632,
"grad_norm": 0.0001358153240289539,
"learning_rate": 1.0539072964540373e-05,
"loss": 0.2425,
"step": 118000
},
{
"epoch": 1.4256839673716883,
"grad_norm": 0.2505891025066376,
"learning_rate": 1.0498969336766204e-05,
"loss": 0.243,
"step": 118500
},
{
"epoch": 1.4316995115378137,
"grad_norm": 0.25133442878723145,
"learning_rate": 1.0458865708992037e-05,
"loss": 0.2313,
"step": 119000
},
{
"epoch": 1.437715055703939,
"grad_norm": 0.00044232659274712205,
"learning_rate": 1.0418762081217868e-05,
"loss": 0.2426,
"step": 119500
},
{
"epoch": 1.4437305998700642,
"grad_norm": 0.0011125517776235938,
"learning_rate": 1.0378658453443699e-05,
"loss": 0.2418,
"step": 120000
},
{
"epoch": 1.4497461440361894,
"grad_norm": 7.526780605316162,
"learning_rate": 1.0338635032925079e-05,
"loss": 0.2797,
"step": 120500
},
{
"epoch": 1.4557616882023148,
"grad_norm": 6.034137725830078,
"learning_rate": 1.0298531405150912e-05,
"loss": 0.2655,
"step": 121000
},
{
"epoch": 1.4617772323684401,
"grad_norm": 0.021329816430807114,
"learning_rate": 1.0258427777376743e-05,
"loss": 0.2633,
"step": 121500
},
{
"epoch": 1.4677927765345653,
"grad_norm": 0.013099109753966331,
"learning_rate": 1.0218324149602574e-05,
"loss": 0.2739,
"step": 122000
},
{
"epoch": 1.4738083207006905,
"grad_norm": 71.02497863769531,
"learning_rate": 1.0178300729083954e-05,
"loss": 0.254,
"step": 122500
},
{
"epoch": 1.4798238648668158,
"grad_norm": 15.551078796386719,
"learning_rate": 1.0138197101309786e-05,
"loss": 0.2602,
"step": 123000
},
{
"epoch": 1.4858394090329412,
"grad_norm": 4.560909655992873e-05,
"learning_rate": 1.0098093473535617e-05,
"loss": 0.2387,
"step": 123500
},
{
"epoch": 1.4918549531990664,
"grad_norm": 0.14415931701660156,
"learning_rate": 1.0057989845761448e-05,
"loss": 0.2823,
"step": 124000
},
{
"epoch": 1.4978704973651917,
"grad_norm": 43.61802673339844,
"learning_rate": 1.0017886217987281e-05,
"loss": 0.2378,
"step": 124500
},
{
"epoch": 1.5038860415313169,
"grad_norm": 0.20755548775196075,
"learning_rate": 9.977782590213112e-06,
"loss": 0.2433,
"step": 125000
},
{
"epoch": 1.5099015856974423,
"grad_norm": 0.2520395815372467,
"learning_rate": 9.937678962438943e-06,
"loss": 0.2539,
"step": 125500
},
{
"epoch": 1.5159171298635674,
"grad_norm": 0.3743610382080078,
"learning_rate": 9.897575334664774e-06,
"loss": 0.2789,
"step": 126000
},
{
"epoch": 1.5219326740296926,
"grad_norm": 0.0024790179450064898,
"learning_rate": 9.857551914146154e-06,
"loss": 0.2258,
"step": 126500
},
{
"epoch": 1.527948218195818,
"grad_norm": 21.557737350463867,
"learning_rate": 9.817448286371985e-06,
"loss": 0.2692,
"step": 127000
},
{
"epoch": 1.5339637623619433,
"grad_norm": 0.00022073234140407294,
"learning_rate": 9.777344658597818e-06,
"loss": 0.2674,
"step": 127500
},
{
"epoch": 1.5399793065280685,
"grad_norm": 0.0013155222404748201,
"learning_rate": 9.737241030823649e-06,
"loss": 0.242,
"step": 128000
},
{
"epoch": 1.5459948506941938,
"grad_norm": 0.2000940591096878,
"learning_rate": 9.69713740304948e-06,
"loss": 0.2487,
"step": 128500
},
{
"epoch": 1.5520103948603192,
"grad_norm": 0.0021518643479794264,
"learning_rate": 9.657033775275312e-06,
"loss": 0.2123,
"step": 129000
},
{
"epoch": 1.5580259390264444,
"grad_norm": 0.24494774639606476,
"learning_rate": 9.616930147501143e-06,
"loss": 0.2975,
"step": 129500
},
{
"epoch": 1.5640414831925695,
"grad_norm": 9.558631896972656,
"learning_rate": 9.576906726982523e-06,
"loss": 0.2485,
"step": 130000
},
{
"epoch": 1.570057027358695,
"grad_norm": 6.8279128074646,
"learning_rate": 9.536803099208354e-06,
"loss": 0.2589,
"step": 130500
},
{
"epoch": 1.5760725715248203,
"grad_norm": 0.018290262669324875,
"learning_rate": 9.496699471434187e-06,
"loss": 0.2605,
"step": 131000
},
{
"epoch": 1.5820881156909454,
"grad_norm": 0.19554129242897034,
"learning_rate": 9.456595843660018e-06,
"loss": 0.2405,
"step": 131500
},
{
"epoch": 1.5881036598570706,
"grad_norm": 26.12983512878418,
"learning_rate": 9.416492215885849e-06,
"loss": 0.2261,
"step": 132000
},
{
"epoch": 1.594119204023196,
"grad_norm": 19.12807273864746,
"learning_rate": 9.376388588111682e-06,
"loss": 0.2454,
"step": 132500
},
{
"epoch": 1.6001347481893213,
"grad_norm": 48.97768020629883,
"learning_rate": 9.336284960337513e-06,
"loss": 0.2488,
"step": 133000
},
{
"epoch": 1.6061502923554465,
"grad_norm": 0.10775639116764069,
"learning_rate": 9.296261539818893e-06,
"loss": 0.2712,
"step": 133500
},
{
"epoch": 1.6121658365215716,
"grad_norm": 18.01923942565918,
"learning_rate": 9.256157912044724e-06,
"loss": 0.2898,
"step": 134000
},
{
"epoch": 1.618181380687697,
"grad_norm": 0.057373084127902985,
"learning_rate": 9.216054284270556e-06,
"loss": 0.2235,
"step": 134500
},
{
"epoch": 1.6241969248538224,
"grad_norm": 0.0020849681459367275,
"learning_rate": 9.175950656496387e-06,
"loss": 0.2355,
"step": 135000
},
{
"epoch": 1.6302124690199475,
"grad_norm": 0.15205715596675873,
"learning_rate": 9.135847028722218e-06,
"loss": 0.2791,
"step": 135500
},
{
"epoch": 1.6362280131860727,
"grad_norm": 0.06727798283100128,
"learning_rate": 9.095823608203598e-06,
"loss": 0.2412,
"step": 136000
},
{
"epoch": 1.642243557352198,
"grad_norm": 0.19422870874404907,
"learning_rate": 9.055719980429431e-06,
"loss": 0.2431,
"step": 136500
},
{
"epoch": 1.6482591015183234,
"grad_norm": 0.06913313269615173,
"learning_rate": 9.015616352655262e-06,
"loss": 0.2687,
"step": 137000
},
{
"epoch": 1.6542746456844486,
"grad_norm": 0.06762461364269257,
"learning_rate": 8.975512724881093e-06,
"loss": 0.2498,
"step": 137500
},
{
"epoch": 1.6602901898505737,
"grad_norm": 0.1282985806465149,
"learning_rate": 8.935409097106926e-06,
"loss": 0.2503,
"step": 138000
},
{
"epoch": 1.6663057340166991,
"grad_norm": 0.6339439153671265,
"learning_rate": 8.895305469332756e-06,
"loss": 0.2454,
"step": 138500
},
{
"epoch": 1.6723212781828245,
"grad_norm": 0.15219150483608246,
"learning_rate": 8.855201841558587e-06,
"loss": 0.2845,
"step": 139000
},
{
"epoch": 1.6783368223489497,
"grad_norm": 5.600707054138184,
"learning_rate": 8.815178421039968e-06,
"loss": 0.2459,
"step": 139500
},
{
"epoch": 1.6843523665150748,
"grad_norm": 0.36313244700431824,
"learning_rate": 8.775155000521348e-06,
"loss": 0.2374,
"step": 140000
},
{
"epoch": 1.6903679106812002,
"grad_norm": 4.401086807250977,
"learning_rate": 8.73505137274718e-06,
"loss": 0.2425,
"step": 140500
},
{
"epoch": 1.6963834548473256,
"grad_norm": 0.00033720143255777657,
"learning_rate": 8.694947744973011e-06,
"loss": 0.2341,
"step": 141000
},
{
"epoch": 1.7023989990134507,
"grad_norm": 0.0821937769651413,
"learning_rate": 8.654844117198842e-06,
"loss": 0.2326,
"step": 141500
},
{
"epoch": 1.7084145431795759,
"grad_norm": 0.000721343734767288,
"learning_rate": 8.614740489424675e-06,
"loss": 0.2553,
"step": 142000
},
{
"epoch": 1.7144300873457012,
"grad_norm": 0.3059915602207184,
"learning_rate": 8.574636861650506e-06,
"loss": 0.2743,
"step": 142500
},
{
"epoch": 1.7204456315118266,
"grad_norm": 0.13650359213352203,
"learning_rate": 8.534533233876337e-06,
"loss": 0.255,
"step": 143000
},
{
"epoch": 1.7264611756779518,
"grad_norm": 0.16923360526561737,
"learning_rate": 8.49442960610217e-06,
"loss": 0.2832,
"step": 143500
},
{
"epoch": 1.7324767198440771,
"grad_norm": 0.0067064897157251835,
"learning_rate": 8.454325978328e-06,
"loss": 0.2425,
"step": 144000
},
{
"epoch": 1.7384922640102025,
"grad_norm": 0.33390843868255615,
"learning_rate": 8.41430255780938e-06,
"loss": 0.2577,
"step": 144500
},
{
"epoch": 1.7445078081763277,
"grad_norm": 0.11052338778972626,
"learning_rate": 8.374198930035212e-06,
"loss": 0.253,
"step": 145000
},
{
"epoch": 1.7505233523424528,
"grad_norm": 0.06620516628026962,
"learning_rate": 8.334095302261042e-06,
"loss": 0.2555,
"step": 145500
},
{
"epoch": 1.7565388965085782,
"grad_norm": 0.00559147447347641,
"learning_rate": 8.293991674486873e-06,
"loss": 0.2387,
"step": 146000
},
{
"epoch": 1.7625544406747036,
"grad_norm": 0.1951608508825302,
"learning_rate": 8.253888046712706e-06,
"loss": 0.2249,
"step": 146500
},
{
"epoch": 1.7685699848408287,
"grad_norm": 0.16532181203365326,
"learning_rate": 8.213784418938537e-06,
"loss": 0.2779,
"step": 147000
},
{
"epoch": 1.7745855290069539,
"grad_norm": 0.024670429527759552,
"learning_rate": 8.17368079116437e-06,
"loss": 0.24,
"step": 147500
},
{
"epoch": 1.7806010731730793,
"grad_norm": 0.7060034275054932,
"learning_rate": 8.1335771633902e-06,
"loss": 0.2646,
"step": 148000
},
{
"epoch": 1.7866166173392046,
"grad_norm": 0.16653361916542053,
"learning_rate": 8.093473535616032e-06,
"loss": 0.2829,
"step": 148500
},
{
"epoch": 1.7926321615053298,
"grad_norm": 9.015371322631836,
"learning_rate": 8.053369907841864e-06,
"loss": 0.2465,
"step": 149000
},
{
"epoch": 1.798647705671455,
"grad_norm": 0.08503203839063644,
"learning_rate": 8.013346487323243e-06,
"loss": 0.2476,
"step": 149500
},
{
"epoch": 1.8046632498375803,
"grad_norm": 0.31652989983558655,
"learning_rate": 7.973242859549075e-06,
"loss": 0.2807,
"step": 150000
},
{
"epoch": 1.8106787940037057,
"grad_norm": 20.23206329345703,
"learning_rate": 7.933139231774906e-06,
"loss": 0.2393,
"step": 150500
},
{
"epoch": 1.8166943381698308,
"grad_norm": 0.0023623614106327295,
"learning_rate": 7.893035604000739e-06,
"loss": 0.2606,
"step": 151000
},
{
"epoch": 1.822709882335956,
"grad_norm": 0.0003463963221292943,
"learning_rate": 7.85293197622657e-06,
"loss": 0.2377,
"step": 151500
},
{
"epoch": 1.8287254265020814,
"grad_norm": 22.177343368530273,
"learning_rate": 7.81290855570795e-06,
"loss": 0.225,
"step": 152000
},
{
"epoch": 1.8347409706682067,
"grad_norm": 0.16396382451057434,
"learning_rate": 7.772804927933781e-06,
"loss": 0.2749,
"step": 152500
},
{
"epoch": 1.840756514834332,
"grad_norm": 0.06666311621665955,
"learning_rate": 7.732701300159614e-06,
"loss": 0.2449,
"step": 153000
},
{
"epoch": 1.846772059000457,
"grad_norm": 0.2808719575405121,
"learning_rate": 7.692597672385445e-06,
"loss": 0.2201,
"step": 153500
},
{
"epoch": 1.8527876031665824,
"grad_norm": 0.02584654837846756,
"learning_rate": 7.652574251866825e-06,
"loss": 0.2555,
"step": 154000
},
{
"epoch": 1.8588031473327078,
"grad_norm": 6.074542999267578,
"learning_rate": 7.612470624092656e-06,
"loss": 0.2652,
"step": 154500
},
{
"epoch": 1.864818691498833,
"grad_norm": 0.007166345603764057,
"learning_rate": 7.572447203574036e-06,
"loss": 0.2777,
"step": 155000
},
{
"epoch": 1.870834235664958,
"grad_norm": 2.7224321365356445,
"learning_rate": 7.532423783055416e-06,
"loss": 0.2614,
"step": 155500
},
{
"epoch": 1.8768497798310835,
"grad_norm": 8.61175537109375,
"learning_rate": 7.492320155281248e-06,
"loss": 0.2716,
"step": 156000
},
{
"epoch": 1.8828653239972089,
"grad_norm": 0.4761998951435089,
"learning_rate": 7.452216527507079e-06,
"loss": 0.2617,
"step": 156500
},
{
"epoch": 1.888880868163334,
"grad_norm": 0.041520122438669205,
"learning_rate": 7.4121128997329105e-06,
"loss": 0.2233,
"step": 157000
},
{
"epoch": 1.8948964123294592,
"grad_norm": 0.3298840820789337,
"learning_rate": 7.372009271958742e-06,
"loss": 0.2905,
"step": 157500
},
{
"epoch": 1.9009119564955848,
"grad_norm": 0.14137232303619385,
"learning_rate": 7.331905644184573e-06,
"loss": 0.2489,
"step": 158000
},
{
"epoch": 1.90692750066171,
"grad_norm": 0.017581721767783165,
"learning_rate": 7.291802016410405e-06,
"loss": 0.2531,
"step": 158500
},
{
"epoch": 1.912943044827835,
"grad_norm": 0.10974815487861633,
"learning_rate": 7.251698388636237e-06,
"loss": 0.2387,
"step": 159000
},
{
"epoch": 1.9189585889939604,
"grad_norm": 0.09865211695432663,
"learning_rate": 7.211674968117617e-06,
"loss": 0.2364,
"step": 159500
},
{
"epoch": 1.9249741331600858,
"grad_norm": 0.004849567078053951,
"learning_rate": 7.171571340343448e-06,
"loss": 0.2751,
"step": 160000
},
{
"epoch": 1.930989677326211,
"grad_norm": 0.017855796962976456,
"learning_rate": 7.13146771256928e-06,
"loss": 0.2422,
"step": 160500
},
{
"epoch": 1.9370052214923361,
"grad_norm": 0.22124846279621124,
"learning_rate": 7.091364084795112e-06,
"loss": 0.2409,
"step": 161000
},
{
"epoch": 1.9430207656584615,
"grad_norm": 0.0023982899729162455,
"learning_rate": 7.051340664276492e-06,
"loss": 0.2699,
"step": 161500
},
{
"epoch": 1.9490363098245869,
"grad_norm": 8.00629997253418,
"learning_rate": 7.011237036502323e-06,
"loss": 0.2399,
"step": 162000
},
{
"epoch": 1.955051853990712,
"grad_norm": 7.468501091003418,
"learning_rate": 6.9711334087281545e-06,
"loss": 0.2535,
"step": 162500
},
{
"epoch": 1.9610673981568372,
"grad_norm": 0.4242821931838989,
"learning_rate": 6.931029780953986e-06,
"loss": 0.2506,
"step": 163000
},
{
"epoch": 1.9670829423229625,
"grad_norm": 0.020416131243109703,
"learning_rate": 6.890926153179817e-06,
"loss": 0.2482,
"step": 163500
},
{
"epoch": 1.973098486489088,
"grad_norm": 0.06718198210000992,
"learning_rate": 6.850822525405649e-06,
"loss": 0.2505,
"step": 164000
},
{
"epoch": 1.979114030655213,
"grad_norm": 0.7040701508522034,
"learning_rate": 6.81071889763148e-06,
"loss": 0.2196,
"step": 164500
},
{
"epoch": 1.9851295748213382,
"grad_norm": 0.1061306744813919,
"learning_rate": 6.770615269857311e-06,
"loss": 0.2699,
"step": 165000
},
{
"epoch": 1.9911451189874636,
"grad_norm": 0.1626858115196228,
"learning_rate": 6.730511642083143e-06,
"loss": 0.2428,
"step": 165500
},
{
"epoch": 1.997160663153589,
"grad_norm": 0.9182321429252625,
"learning_rate": 6.690408014308975e-06,
"loss": 0.2723,
"step": 166000
},
{
"epoch": 2.0,
"eval_accuracy": 0.6772566130351786,
"eval_f1": 0.7059909142532652,
"eval_loss": 0.30793967843055725,
"eval_roc_auc": 0.8147532042541586,
"eval_runtime": 109.0185,
"eval_samples_per_second": 67.273,
"eval_steps_per_second": 67.273,
"step": 166236
},
{
"epoch": 2.003176207319714,
"grad_norm": 0.0028744570445269346,
"learning_rate": 6.650304386534806e-06,
"loss": 0.2161,
"step": 166500
},
{
"epoch": 2.0091917514858393,
"grad_norm": 0.10508907586336136,
"learning_rate": 6.6102007587606375e-06,
"loss": 0.1802,
"step": 167000
},
{
"epoch": 2.015207295651965,
"grad_norm": 0.023608388379216194,
"learning_rate": 6.570257545497566e-06,
"loss": 0.1646,
"step": 167500
},
{
"epoch": 2.02122283981809,
"grad_norm": 0.0004214489890728146,
"learning_rate": 6.530153917723398e-06,
"loss": 0.1425,
"step": 168000
},
{
"epoch": 2.027238383984215,
"grad_norm": 0.15282946825027466,
"learning_rate": 6.490050289949229e-06,
"loss": 0.1767,
"step": 168500
},
{
"epoch": 2.0332539281503403,
"grad_norm": 0.5594785213470459,
"learning_rate": 6.4499466621750605e-06,
"loss": 0.1431,
"step": 169000
},
{
"epoch": 2.039269472316466,
"grad_norm": 0.006397734861820936,
"learning_rate": 6.409843034400892e-06,
"loss": 0.1433,
"step": 169500
},
{
"epoch": 2.045285016482591,
"grad_norm": 0.02048441767692566,
"learning_rate": 6.369739406626724e-06,
"loss": 0.1751,
"step": 170000
},
{
"epoch": 2.0513005606487162,
"grad_norm": 0.08668813109397888,
"learning_rate": 6.329635778852555e-06,
"loss": 0.1904,
"step": 170500
},
{
"epoch": 2.0573161048148414,
"grad_norm": 1.3654146194458008,
"learning_rate": 6.289612358333935e-06,
"loss": 0.1477,
"step": 171000
},
{
"epoch": 2.063331648980967,
"grad_norm": 0.10657373815774918,
"learning_rate": 6.249508730559767e-06,
"loss": 0.1651,
"step": 171500
},
{
"epoch": 2.069347193147092,
"grad_norm": 0.0666826069355011,
"learning_rate": 6.209405102785599e-06,
"loss": 0.1896,
"step": 172000
},
{
"epoch": 2.0753627373132173,
"grad_norm": 0.03260161355137825,
"learning_rate": 6.16930147501143e-06,
"loss": 0.1595,
"step": 172500
},
{
"epoch": 2.0813782814793425,
"grad_norm": 11.851826667785645,
"learning_rate": 6.129197847237262e-06,
"loss": 0.1664,
"step": 173000
},
{
"epoch": 2.087393825645468,
"grad_norm": 0.0001777495490387082,
"learning_rate": 6.089094219463093e-06,
"loss": 0.1719,
"step": 173500
},
{
"epoch": 2.093409369811593,
"grad_norm": 0.001742048654705286,
"learning_rate": 6.048990591688924e-06,
"loss": 0.1336,
"step": 174000
},
{
"epoch": 2.0994249139777184,
"grad_norm": 0.233998104929924,
"learning_rate": 6.008886963914756e-06,
"loss": 0.1659,
"step": 174500
},
{
"epoch": 2.1054404581438435,
"grad_norm": 0.011589854955673218,
"learning_rate": 5.968863543396136e-06,
"loss": 0.1662,
"step": 175000
},
{
"epoch": 2.111456002309969,
"grad_norm": 0.003179445629939437,
"learning_rate": 5.928759915621968e-06,
"loss": 0.201,
"step": 175500
},
{
"epoch": 2.1174715464760943,
"grad_norm": 0.05272052437067032,
"learning_rate": 5.888656287847799e-06,
"loss": 0.1648,
"step": 176000
},
{
"epoch": 2.1234870906422194,
"grad_norm": 17.65900421142578,
"learning_rate": 5.848552660073631e-06,
"loss": 0.1499,
"step": 176500
},
{
"epoch": 2.1295026348083446,
"grad_norm": 0.017062697559595108,
"learning_rate": 5.808449032299463e-06,
"loss": 0.1812,
"step": 177000
},
{
"epoch": 2.13551817897447,
"grad_norm": 0.062195923179388046,
"learning_rate": 5.768505819036391e-06,
"loss": 0.1745,
"step": 177500
},
{
"epoch": 2.1415337231405953,
"grad_norm": 0.00937980879098177,
"learning_rate": 5.728402191262222e-06,
"loss": 0.138,
"step": 178000
},
{
"epoch": 2.1475492673067205,
"grad_norm": 13.196198463439941,
"learning_rate": 5.688298563488054e-06,
"loss": 0.1717,
"step": 178500
},
{
"epoch": 2.1535648114728456,
"grad_norm": 0.1440310925245285,
"learning_rate": 5.648194935713886e-06,
"loss": 0.1515,
"step": 179000
},
{
"epoch": 2.159580355638971,
"grad_norm": 0.032506223767995834,
"learning_rate": 5.608091307939717e-06,
"loss": 0.1401,
"step": 179500
},
{
"epoch": 2.1655958998050964,
"grad_norm": 6.104779458837584e-05,
"learning_rate": 5.5679876801655484e-06,
"loss": 0.1881,
"step": 180000
},
{
"epoch": 2.1716114439712215,
"grad_norm": 0.04752574488520622,
"learning_rate": 5.52788405239138e-06,
"loss": 0.1603,
"step": 180500
},
{
"epoch": 2.177626988137347,
"grad_norm": 0.005751811899244785,
"learning_rate": 5.487780424617212e-06,
"loss": 0.1983,
"step": 181000
},
{
"epoch": 2.1836425323034723,
"grad_norm": 0.03546688333153725,
"learning_rate": 5.447757004098591e-06,
"loss": 0.1831,
"step": 181500
},
{
"epoch": 2.1896580764695974,
"grad_norm": 0.21516333520412445,
"learning_rate": 5.407653376324423e-06,
"loss": 0.1708,
"step": 182000
},
{
"epoch": 2.1956736206357226,
"grad_norm": 0.061349764466285706,
"learning_rate": 5.367549748550255e-06,
"loss": 0.1737,
"step": 182500
},
{
"epoch": 2.201689164801848,
"grad_norm": 67.89110565185547,
"learning_rate": 5.327446120776087e-06,
"loss": 0.1465,
"step": 183000
},
{
"epoch": 2.2077047089679733,
"grad_norm": 0.0042543611489236355,
"learning_rate": 5.287342493001918e-06,
"loss": 0.2034,
"step": 183500
},
{
"epoch": 2.2137202531340985,
"grad_norm": 8.772153854370117,
"learning_rate": 5.247238865227749e-06,
"loss": 0.1354,
"step": 184000
},
{
"epoch": 2.2197357973002236,
"grad_norm": 0.46139106154441833,
"learning_rate": 5.207215444709128e-06,
"loss": 0.1838,
"step": 184500
},
{
"epoch": 2.2257513414663492,
"grad_norm": 0.007118485402315855,
"learning_rate": 5.16711181693496e-06,
"loss": 0.1702,
"step": 185000
},
{
"epoch": 2.2317668856324744,
"grad_norm": 0.01587226428091526,
"learning_rate": 5.127008189160792e-06,
"loss": 0.1566,
"step": 185500
},
{
"epoch": 2.2377824297985995,
"grad_norm": 0.006735761184245348,
"learning_rate": 5.086904561386623e-06,
"loss": 0.1644,
"step": 186000
},
{
"epoch": 2.2437979739647247,
"grad_norm": 111.12641143798828,
"learning_rate": 5.046881140868003e-06,
"loss": 0.1578,
"step": 186500
},
{
"epoch": 2.2498135181308503,
"grad_norm": 0.004494801629334688,
"learning_rate": 5.0067775130938344e-06,
"loss": 0.1625,
"step": 187000
},
{
"epoch": 2.2558290622969754,
"grad_norm": 61.42302703857422,
"learning_rate": 4.966673885319666e-06,
"loss": 0.1725,
"step": 187500
},
{
"epoch": 2.2618446064631006,
"grad_norm": 102.92018127441406,
"learning_rate": 4.926570257545498e-06,
"loss": 0.1503,
"step": 188000
},
{
"epoch": 2.2678601506292257,
"grad_norm": 0.15618078410625458,
"learning_rate": 4.886466629771329e-06,
"loss": 0.1586,
"step": 188500
},
{
"epoch": 2.2738756947953513,
"grad_norm": 0.1413143128156662,
"learning_rate": 4.846363001997161e-06,
"loss": 0.1985,
"step": 189000
},
{
"epoch": 2.2798912389614765,
"grad_norm": 40.41890335083008,
"learning_rate": 4.806259374222993e-06,
"loss": 0.1302,
"step": 189500
},
{
"epoch": 2.2859067831276016,
"grad_norm": 0.0008507597958669066,
"learning_rate": 4.766235953704372e-06,
"loss": 0.1409,
"step": 190000
},
{
"epoch": 2.291922327293727,
"grad_norm": 2.4075923647615127e-05,
"learning_rate": 4.726132325930204e-06,
"loss": 0.1463,
"step": 190500
},
{
"epoch": 2.2979378714598524,
"grad_norm": 0.05570465698838234,
"learning_rate": 4.6860286981560356e-06,
"loss": 0.1641,
"step": 191000
},
{
"epoch": 2.3039534156259776,
"grad_norm": 177.34120178222656,
"learning_rate": 4.645925070381867e-06,
"loss": 0.1766,
"step": 191500
},
{
"epoch": 2.3099689597921027,
"grad_norm": 25.356895446777344,
"learning_rate": 4.605821442607698e-06,
"loss": 0.1788,
"step": 192000
},
{
"epoch": 2.3159845039582283,
"grad_norm": 26.419334411621094,
"learning_rate": 4.56571781483353e-06,
"loss": 0.1875,
"step": 192500
},
{
"epoch": 2.3220000481243535,
"grad_norm": 0.05582762509584427,
"learning_rate": 4.525614187059362e-06,
"loss": 0.1357,
"step": 193000
},
{
"epoch": 2.3280155922904786,
"grad_norm": 0.01142974104732275,
"learning_rate": 4.485510559285193e-06,
"loss": 0.148,
"step": 193500
},
{
"epoch": 2.3340311364566038,
"grad_norm": 0.013201882131397724,
"learning_rate": 4.445487138766573e-06,
"loss": 0.1279,
"step": 194000
},
{
"epoch": 2.340046680622729,
"grad_norm": 0.007936575450003147,
"learning_rate": 4.405383510992405e-06,
"loss": 0.1492,
"step": 194500
},
{
"epoch": 2.3460622247888545,
"grad_norm": 0.030919533222913742,
"learning_rate": 4.365360090473785e-06,
"loss": 0.1298,
"step": 195000
},
{
"epoch": 2.3520777689549797,
"grad_norm": 0.0013994334731251001,
"learning_rate": 4.325256462699616e-06,
"loss": 0.1555,
"step": 195500
},
{
"epoch": 2.358093313121105,
"grad_norm": 0.004653325304389,
"learning_rate": 4.285152834925448e-06,
"loss": 0.1612,
"step": 196000
},
{
"epoch": 2.3641088572872304,
"grad_norm": 0.034009434282779694,
"learning_rate": 4.2450492071512796e-06,
"loss": 0.1543,
"step": 196500
},
{
"epoch": 2.3701244014533556,
"grad_norm": 0.6855669021606445,
"learning_rate": 4.20502578663266e-06,
"loss": 0.2072,
"step": 197000
},
{
"epoch": 2.3761399456194807,
"grad_norm": 45.97907257080078,
"learning_rate": 4.164922158858491e-06,
"loss": 0.1637,
"step": 197500
},
{
"epoch": 2.382155489785606,
"grad_norm": 0.36125409603118896,
"learning_rate": 4.1248185310843216e-06,
"loss": 0.142,
"step": 198000
},
{
"epoch": 2.388171033951731,
"grad_norm": 0.013704453594982624,
"learning_rate": 4.084714903310153e-06,
"loss": 0.1702,
"step": 198500
},
{
"epoch": 2.3941865781178566,
"grad_norm": 28.00567626953125,
"learning_rate": 4.044611275535985e-06,
"loss": 0.1641,
"step": 199000
},
{
"epoch": 2.4002021222839818,
"grad_norm": 0.4930565655231476,
"learning_rate": 4.0046680622729135e-06,
"loss": 0.147,
"step": 199500
},
{
"epoch": 2.406217666450107,
"grad_norm": 1.68142831325531,
"learning_rate": 3.964564434498745e-06,
"loss": 0.1543,
"step": 200000
},
{
"epoch": 2.4122332106162325,
"grad_norm": 0.00046357992687262595,
"learning_rate": 3.924460806724576e-06,
"loss": 0.1383,
"step": 200500
},
{
"epoch": 2.4182487547823577,
"grad_norm": 0.08767001330852509,
"learning_rate": 3.884357178950408e-06,
"loss": 0.1697,
"step": 201000
},
{
"epoch": 2.424264298948483,
"grad_norm": 14.962833404541016,
"learning_rate": 3.84425355117624e-06,
"loss": 0.1919,
"step": 201500
},
{
"epoch": 2.430279843114608,
"grad_norm": 0.0750858262181282,
"learning_rate": 3.8041499234020714e-06,
"loss": 0.1238,
"step": 202000
},
{
"epoch": 2.4362953872807336,
"grad_norm": 0.011100132018327713,
"learning_rate": 3.7640462956279028e-06,
"loss": 0.1584,
"step": 202500
},
{
"epoch": 2.4423109314468587,
"grad_norm": 0.0002675870491657406,
"learning_rate": 3.723942667853734e-06,
"loss": 0.1393,
"step": 203000
},
{
"epoch": 2.448326475612984,
"grad_norm": 22.56096839904785,
"learning_rate": 3.683839040079566e-06,
"loss": 0.1284,
"step": 203500
},
{
"epoch": 2.454342019779109,
"grad_norm": 0.0010695152450352907,
"learning_rate": 3.643815619560946e-06,
"loss": 0.1636,
"step": 204000
},
{
"epoch": 2.4603575639452346,
"grad_norm": 0.05840376392006874,
"learning_rate": 3.6037119917867774e-06,
"loss": 0.1824,
"step": 204500
},
{
"epoch": 2.46637310811136,
"grad_norm": 0.0005692685954272747,
"learning_rate": 3.563608364012609e-06,
"loss": 0.1189,
"step": 205000
},
{
"epoch": 2.472388652277485,
"grad_norm": 12.676384925842285,
"learning_rate": 3.5235047362384407e-06,
"loss": 0.1589,
"step": 205500
},
{
"epoch": 2.47840419644361,
"grad_norm": 0.024767233058810234,
"learning_rate": 3.483401108464272e-06,
"loss": 0.179,
"step": 206000
},
{
"epoch": 2.4844197406097357,
"grad_norm": 0.014465268701314926,
"learning_rate": 3.443297480690104e-06,
"loss": 0.134,
"step": 206500
},
{
"epoch": 2.490435284775861,
"grad_norm": 46.07574462890625,
"learning_rate": 3.4031938529159353e-06,
"loss": 0.1839,
"step": 207000
},
{
"epoch": 2.496450828941986,
"grad_norm": 2.9257161617279053,
"learning_rate": 3.3630902251417667e-06,
"loss": 0.1522,
"step": 207500
},
{
"epoch": 2.5024663731081116,
"grad_norm": 0.010684625245630741,
"learning_rate": 3.3230668046231463e-06,
"loss": 0.137,
"step": 208000
},
{
"epoch": 2.5084819172742367,
"grad_norm": 0.023651426658034325,
"learning_rate": 3.2829631768489777e-06,
"loss": 0.1893,
"step": 208500
},
{
"epoch": 2.514497461440362,
"grad_norm": 5.37974214553833,
"learning_rate": 3.242939756330358e-06,
"loss": 0.1688,
"step": 209000
},
{
"epoch": 2.520513005606487,
"grad_norm": 0.21090950071811676,
"learning_rate": 3.202836128556189e-06,
"loss": 0.173,
"step": 209500
},
{
"epoch": 2.526528549772612,
"grad_norm": 24.091960906982422,
"learning_rate": 3.162732500782021e-06,
"loss": 0.1614,
"step": 210000
},
{
"epoch": 2.532544093938738,
"grad_norm": 0.032513245940208435,
"learning_rate": 3.1226288730078524e-06,
"loss": 0.1816,
"step": 210500
},
{
"epoch": 2.538559638104863,
"grad_norm": 0.045939259231090546,
"learning_rate": 3.082525245233684e-06,
"loss": 0.1646,
"step": 211000
},
{
"epoch": 2.544575182270988,
"grad_norm": 0.7573652863502502,
"learning_rate": 3.0424216174595156e-06,
"loss": 0.1224,
"step": 211500
},
{
"epoch": 2.5505907264371137,
"grad_norm": 3.404059648513794,
"learning_rate": 3.0023981969408957e-06,
"loss": 0.1475,
"step": 212000
},
{
"epoch": 2.556606270603239,
"grad_norm": 0.0026354603469371796,
"learning_rate": 2.962294569166727e-06,
"loss": 0.1591,
"step": 212500
},
{
"epoch": 2.562621814769364,
"grad_norm": 21.002397537231445,
"learning_rate": 2.9221909413925585e-06,
"loss": 0.133,
"step": 213000
},
{
"epoch": 2.568637358935489,
"grad_norm": 0.020220952108502388,
"learning_rate": 2.8820873136183903e-06,
"loss": 0.144,
"step": 213500
},
{
"epoch": 2.5746529031016143,
"grad_norm": 0.02407023496925831,
"learning_rate": 2.84206389309977e-06,
"loss": 0.1522,
"step": 214000
},
{
"epoch": 2.58066844726774,
"grad_norm": 0.05026541277766228,
"learning_rate": 2.8019602653256018e-06,
"loss": 0.1751,
"step": 214500
},
{
"epoch": 2.586683991433865,
"grad_norm": 0.0013814900303259492,
"learning_rate": 2.761856637551433e-06,
"loss": 0.1606,
"step": 215000
},
{
"epoch": 2.5926995355999902,
"grad_norm": 0.09587771445512772,
"learning_rate": 2.721753009777265e-06,
"loss": 0.1553,
"step": 215500
},
{
"epoch": 2.598715079766116,
"grad_norm": 0.014226296916604042,
"learning_rate": 2.6816493820030964e-06,
"loss": 0.1732,
"step": 216000
},
{
"epoch": 2.604730623932241,
"grad_norm": 0.020662061870098114,
"learning_rate": 2.641545754228928e-06,
"loss": 0.1851,
"step": 216500
},
{
"epoch": 2.610746168098366,
"grad_norm": 0.020077265799045563,
"learning_rate": 2.6015223337103074e-06,
"loss": 0.1672,
"step": 217000
},
{
"epoch": 2.6167617122644913,
"grad_norm": 17.917278289794922,
"learning_rate": 2.561418705936139e-06,
"loss": 0.1757,
"step": 217500
},
{
"epoch": 2.622777256430617,
"grad_norm": 0.010099813342094421,
"learning_rate": 2.5213150781619707e-06,
"loss": 0.154,
"step": 218000
},
{
"epoch": 2.628792800596742,
"grad_norm": 50.070430755615234,
"learning_rate": 2.4812114503878025e-06,
"loss": 0.1557,
"step": 218500
},
{
"epoch": 2.634808344762867,
"grad_norm": 0.0430847629904747,
"learning_rate": 2.441107822613634e-06,
"loss": 0.1578,
"step": 219000
},
{
"epoch": 2.640823888928993,
"grad_norm": 45.390384674072266,
"learning_rate": 2.4010844020950135e-06,
"loss": 0.1584,
"step": 219500
},
{
"epoch": 2.646839433095118,
"grad_norm": 0.01655266433954239,
"learning_rate": 2.361141188831942e-06,
"loss": 0.1926,
"step": 220000
},
{
"epoch": 2.652854977261243,
"grad_norm": 0.006697576493024826,
"learning_rate": 2.3210375610577732e-06,
"loss": 0.1469,
"step": 220500
},
{
"epoch": 2.6588705214273682,
"grad_norm": 0.055724818259477615,
"learning_rate": 2.280933933283605e-06,
"loss": 0.1548,
"step": 221000
},
{
"epoch": 2.6648860655934934,
"grad_norm": 0.09359436482191086,
"learning_rate": 2.2408303055094365e-06,
"loss": 0.1523,
"step": 221500
},
{
"epoch": 2.670901609759619,
"grad_norm": 0.005888832733035088,
"learning_rate": 2.2007266777352683e-06,
"loss": 0.1896,
"step": 222000
},
{
"epoch": 2.676917153925744,
"grad_norm": 0.018979087471961975,
"learning_rate": 2.1606230499610997e-06,
"loss": 0.123,
"step": 222500
},
{
"epoch": 2.6829326980918693,
"grad_norm": 0.11329194158315659,
"learning_rate": 2.1205194221869315e-06,
"loss": 0.1556,
"step": 223000
},
{
"epoch": 2.688948242257995,
"grad_norm": 0.018361905589699745,
"learning_rate": 2.080415794412763e-06,
"loss": 0.1621,
"step": 223500
},
{
"epoch": 2.69496378642412,
"grad_norm": 0.00052923389011994,
"learning_rate": 2.0403121666385943e-06,
"loss": 0.1309,
"step": 224000
},
{
"epoch": 2.700979330590245,
"grad_norm": 0.030753634870052338,
"learning_rate": 2.0002085388644257e-06,
"loss": 0.1401,
"step": 224500
},
{
"epoch": 2.7069948747563704,
"grad_norm": 0.01107688806951046,
"learning_rate": 1.9601049110902575e-06,
"loss": 0.1444,
"step": 225000
},
{
"epoch": 2.7130104189224955,
"grad_norm": 0.0049855210818350315,
"learning_rate": 1.920001283316089e-06,
"loss": 0.1398,
"step": 225500
},
{
"epoch": 2.719025963088621,
"grad_norm": 52.67742156982422,
"learning_rate": 1.8798976555419205e-06,
"loss": 0.1743,
"step": 226000
},
{
"epoch": 2.7250415072547463,
"grad_norm": 0.17802126705646515,
"learning_rate": 1.839794027767752e-06,
"loss": 0.1774,
"step": 226500
},
{
"epoch": 2.7310570514208714,
"grad_norm": 0.03584326431155205,
"learning_rate": 1.7996903999935835e-06,
"loss": 0.1282,
"step": 227000
},
{
"epoch": 2.737072595586997,
"grad_norm": 0.08680440485477448,
"learning_rate": 1.7595867722194151e-06,
"loss": 0.1281,
"step": 227500
},
{
"epoch": 2.743088139753122,
"grad_norm": 0.2684645354747772,
"learning_rate": 1.719563351700795e-06,
"loss": 0.1414,
"step": 228000
},
{
"epoch": 2.7491036839192473,
"grad_norm": 0.027800705283880234,
"learning_rate": 1.679539931182175e-06,
"loss": 0.1622,
"step": 228500
},
{
"epoch": 2.7551192280853725,
"grad_norm": 11.01578140258789,
"learning_rate": 1.6394363034080062e-06,
"loss": 0.1396,
"step": 229000
},
{
"epoch": 2.7611347722514976,
"grad_norm": 0.0035239350982010365,
"learning_rate": 1.5993326756338379e-06,
"loss": 0.1867,
"step": 229500
},
{
"epoch": 2.767150316417623,
"grad_norm": 0.04345840960741043,
"learning_rate": 1.5592290478596695e-06,
"loss": 0.1404,
"step": 230000
},
{
"epoch": 2.7731658605837484,
"grad_norm": 0.003262243466451764,
"learning_rate": 1.5192056273410493e-06,
"loss": 0.1605,
"step": 230500
},
{
"epoch": 2.7791814047498735,
"grad_norm": 0.4090683162212372,
"learning_rate": 1.479101999566881e-06,
"loss": 0.1547,
"step": 231000
},
{
"epoch": 2.785196948915999,
"grad_norm": 0.012451679445803165,
"learning_rate": 1.4389983717927125e-06,
"loss": 0.1147,
"step": 231500
},
{
"epoch": 2.7912124930821243,
"grad_norm": 0.008698958903551102,
"learning_rate": 1.3988947440185442e-06,
"loss": 0.1869,
"step": 232000
},
{
"epoch": 2.7972280372482494,
"grad_norm": 1.2418529987335205,
"learning_rate": 1.3587911162443755e-06,
"loss": 0.1428,
"step": 232500
},
{
"epoch": 2.8032435814143746,
"grad_norm": 0.24010685086250305,
"learning_rate": 1.3186874884702072e-06,
"loss": 0.172,
"step": 233000
},
{
"epoch": 2.8092591255805,
"grad_norm": 0.008175286464393139,
"learning_rate": 1.2785838606960388e-06,
"loss": 0.1932,
"step": 233500
},
{
"epoch": 2.8152746697466253,
"grad_norm": 0.009441105648875237,
"learning_rate": 1.2384802329218702e-06,
"loss": 0.1202,
"step": 234000
},
{
"epoch": 2.8212902139127505,
"grad_norm": 0.0015565038193017244,
"learning_rate": 1.1984568124032502e-06,
"loss": 0.1215,
"step": 234500
},
{
"epoch": 2.827305758078876,
"grad_norm": 71.70852661132812,
"learning_rate": 1.1583531846290816e-06,
"loss": 0.1498,
"step": 235000
},
{
"epoch": 2.8333213022450012,
"grad_norm": 4.693429470062256,
"learning_rate": 1.1182495568549132e-06,
"loss": 0.1463,
"step": 235500
},
{
"epoch": 2.8393368464111264,
"grad_norm": 0.04238261282444,
"learning_rate": 1.0781459290807446e-06,
"loss": 0.1874,
"step": 236000
},
{
"epoch": 2.8453523905772515,
"grad_norm": 0.091603122651577,
"learning_rate": 1.0380423013065762e-06,
"loss": 0.1495,
"step": 236500
},
{
"epoch": 2.8513679347433767,
"grad_norm": 26.191734313964844,
"learning_rate": 9.979386735324079e-07,
"loss": 0.1246,
"step": 237000
},
{
"epoch": 2.8573834789095023,
"grad_norm": 0.0009205593378283083,
"learning_rate": 9.579152530137877e-07,
"loss": 0.1417,
"step": 237500
},
{
"epoch": 2.8633990230756274,
"grad_norm": 0.0035299675073474646,
"learning_rate": 9.178116252396192e-07,
"loss": 0.1665,
"step": 238000
},
{
"epoch": 2.8694145672417526,
"grad_norm": 0.004722926300019026,
"learning_rate": 8.777079974654508e-07,
"loss": 0.144,
"step": 238500
},
{
"epoch": 2.875430111407878,
"grad_norm": 0.007760610897094011,
"learning_rate": 8.376043696912823e-07,
"loss": 0.1325,
"step": 239000
},
{
"epoch": 2.8814456555740033,
"grad_norm": 0.0008993456140160561,
"learning_rate": 7.975007419171139e-07,
"loss": 0.1572,
"step": 239500
},
{
"epoch": 2.8874611997401285,
"grad_norm": 0.18450585007667542,
"learning_rate": 7.574773213984937e-07,
"loss": 0.1473,
"step": 240000
},
{
"epoch": 2.8934767439062536,
"grad_norm": 0.0713871493935585,
"learning_rate": 7.173736936243253e-07,
"loss": 0.1216,
"step": 240500
},
{
"epoch": 2.899492288072379,
"grad_norm": 0.0023451121523976326,
"learning_rate": 6.773502731057053e-07,
"loss": 0.1409,
"step": 241000
},
{
"epoch": 2.9055078322385044,
"grad_norm": 0.014280580915510654,
"learning_rate": 6.372466453315368e-07,
"loss": 0.1946,
"step": 241500
},
{
"epoch": 2.9115233764046295,
"grad_norm": 0.008721483871340752,
"learning_rate": 5.971430175573683e-07,
"loss": 0.1555,
"step": 242000
},
{
"epoch": 2.9175389205707547,
"grad_norm": 0.039840217679739,
"learning_rate": 5.570393897831999e-07,
"loss": 0.1427,
"step": 242500
},
{
"epoch": 2.9235544647368803,
"grad_norm": 0.002139493590220809,
"learning_rate": 5.170159692645797e-07,
"loss": 0.1436,
"step": 243000
},
{
"epoch": 2.9295700089030055,
"grad_norm": 0.0007610457250848413,
"learning_rate": 4.769123414904113e-07,
"loss": 0.1393,
"step": 243500
},
{
"epoch": 2.9355855530691306,
"grad_norm": 0.05628383159637451,
"learning_rate": 4.368087137162428e-07,
"loss": 0.1699,
"step": 244000
},
{
"epoch": 2.9416010972352558,
"grad_norm": 0.02916884608566761,
"learning_rate": 3.9670508594207435e-07,
"loss": 0.1401,
"step": 244500
},
{
"epoch": 2.947616641401381,
"grad_norm": 0.11827978491783142,
"learning_rate": 3.5660145816790586e-07,
"loss": 0.168,
"step": 245000
},
{
"epoch": 2.9536321855675065,
"grad_norm": 0.0027151680551469326,
"learning_rate": 3.165780376492858e-07,
"loss": 0.1561,
"step": 245500
},
{
"epoch": 2.9596477297336317,
"grad_norm": 0.040678609162569046,
"learning_rate": 2.764744098751173e-07,
"loss": 0.1305,
"step": 246000
},
{
"epoch": 2.965663273899757,
"grad_norm": 7.518092632293701,
"learning_rate": 2.3637078210094888e-07,
"loss": 0.2053,
"step": 246500
},
{
"epoch": 2.9716788180658824,
"grad_norm": 0.18074116110801697,
"learning_rate": 1.962671543267804e-07,
"loss": 0.1548,
"step": 247000
},
{
"epoch": 2.9776943622320076,
"grad_norm": 7.7608208656311035,
"learning_rate": 1.5616352655261197e-07,
"loss": 0.1676,
"step": 247500
},
{
"epoch": 2.9837099063981327,
"grad_norm": 0.00906237680464983,
"learning_rate": 1.1605989877844351e-07,
"loss": 0.1534,
"step": 248000
},
{
"epoch": 2.989725450564258,
"grad_norm": 179.03021240234375,
"learning_rate": 7.595627100427505e-08,
"loss": 0.1176,
"step": 248500
},
{
"epoch": 2.9957409947303835,
"grad_norm": 0.00344484462402761,
"learning_rate": 3.58526432301066e-08,
"loss": 0.1711,
"step": 249000
},
{
"epoch": 3.0,
"eval_accuracy": 0.6820289064630488,
"eval_f1": 0.7003498950314906,
"eval_loss": 0.4521113932132721,
"eval_roc_auc": 0.8145350422688847,
"eval_runtime": 110.6722,
"eval_samples_per_second": 66.268,
"eval_steps_per_second": 66.268,
"step": 249354
}
],
"logging_steps": 500,
"max_steps": 249354,
"num_input_tokens_seen": 0,
"num_train_epochs": 3,
"save_steps": 500,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 2.6370896223238963e+17,
"train_batch_size": 1,
"trial_name": null,
"trial_params": null
}