{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 4.0,
  "global_step": 1404,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01,
      "learning_rate": 1.8604651162790697e-05,
      "loss": 1.2577,
      "step": 2
    },
    {
      "epoch": 0.01,
      "learning_rate": 3.7209302325581394e-05,
      "loss": 1.2575,
      "step": 4
    },
    {
      "epoch": 0.02,
      "learning_rate": 5.5813953488372095e-05,
      "loss": 1.2283,
      "step": 6
    },
    {
      "epoch": 0.02,
      "learning_rate": 7.441860465116279e-05,
      "loss": 1.2528,
      "step": 8
    },
    {
      "epoch": 0.03,
      "learning_rate": 9.30232558139535e-05,
      "loss": 1.2608,
      "step": 10
    },
    {
      "epoch": 0.03,
      "learning_rate": 0.00011162790697674419,
      "loss": 1.2019,
      "step": 12
    },
    {
      "epoch": 0.04,
      "learning_rate": 0.0001302325581395349,
      "loss": 1.2713,
      "step": 14
    },
    {
      "epoch": 0.05,
      "learning_rate": 0.00014883720930232558,
      "loss": 1.236,
      "step": 16
    },
    {
      "epoch": 0.05,
      "learning_rate": 0.00016744186046511629,
      "loss": 1.2529,
      "step": 18
    },
    {
      "epoch": 0.06,
      "learning_rate": 0.000186046511627907,
      "loss": 1.2585,
      "step": 20
    },
    {
      "epoch": 0.06,
      "learning_rate": 0.0002046511627906977,
      "loss": 1.2354,
      "step": 22
    },
    {
      "epoch": 0.07,
      "learning_rate": 0.00022325581395348838,
      "loss": 1.2448,
      "step": 24
    },
    {
      "epoch": 0.07,
      "learning_rate": 0.00024186046511627906,
      "loss": 1.2007,
      "step": 26
    },
    {
      "epoch": 0.08,
      "learning_rate": 0.0002604651162790698,
      "loss": 1.2464,
      "step": 28
    },
    {
      "epoch": 0.09,
      "learning_rate": 0.0002790697674418605,
      "loss": 1.2414,
      "step": 30
    },
    {
      "epoch": 0.09,
      "learning_rate": 0.00029767441860465116,
      "loss": 1.2117,
      "step": 32
    },
    {
      "epoch": 0.1,
      "learning_rate": 0.00031627906976744186,
      "loss": 1.2624,
      "step": 34
    },
    {
      "epoch": 0.1,
      "learning_rate": 0.00033488372093023257,
      "loss": 1.2599,
      "step": 36
    },
    {
      "epoch": 0.11,
      "learning_rate": 0.0003534883720930233,
      "loss": 1.2381,
      "step": 38
    },
    {
      "epoch": 0.11,
      "learning_rate": 0.000372093023255814,
      "loss": 1.2065,
      "step": 40
    },
    {
      "epoch": 0.12,
      "learning_rate": 0.00039069767441860464,
      "loss": 1.1707,
      "step": 42
    },
    {
      "epoch": 0.13,
      "learning_rate": 0.0003999994671765845,
      "loss": 1.1943,
      "step": 44
    },
    {
      "epoch": 0.13,
      "learning_rate": 0.00039999520460629425,
      "loss": 1.1769,
      "step": 46
    },
    {
      "epoch": 0.14,
      "learning_rate": 0.0003999866795565616,
      "loss": 1.1891,
      "step": 48
    },
    {
      "epoch": 0.14,
      "learning_rate": 0.0003999738922090801,
      "loss": 1.1709,
      "step": 50
    },
    {
      "epoch": 0.15,
      "learning_rate": 0.00039995684283638535,
      "loss": 1.1636,
      "step": 52
    },
    {
      "epoch": 0.15,
      "learning_rate": 0.00039993553180184906,
      "loss": 1.1606,
      "step": 54
    },
    {
      "epoch": 0.16,
      "learning_rate": 0.00039990995955967125,
      "loss": 1.1322,
      "step": 56
    },
    {
      "epoch": 0.17,
      "learning_rate": 0.0003998801266548709,
      "loss": 1.1243,
      "step": 58
    },
    {
      "epoch": 0.17,
      "learning_rate": 0.0003998460337232739,
      "loss": 1.1555,
      "step": 60
    },
    {
      "epoch": 0.18,
      "learning_rate": 0.0003998076814914998,
      "loss": 1.0912,
      "step": 62
    },
    {
      "epoch": 0.18,
      "learning_rate": 0.0003997650707769462,
      "loss": 1.1373,
      "step": 64
    },
    {
      "epoch": 0.19,
      "learning_rate": 0.00039971820248777137,
      "loss": 1.1136,
      "step": 66
    },
    {
      "epoch": 0.19,
      "learning_rate": 0.0003996670776228748,
      "loss": 1.1003,
      "step": 68
    },
    {
      "epoch": 0.2,
      "learning_rate": 0.00039961169727187605,
      "loss": 1.1401,
      "step": 70
    },
    {
      "epoch": 0.21,
      "learning_rate": 0.0003995520626150915,
      "loss": 1.0842,
      "step": 72
    },
    {
      "epoch": 0.21,
      "learning_rate": 0.00039948817492350914,
      "loss": 1.1078,
      "step": 74
    },
    {
      "epoch": 0.22,
      "learning_rate": 0.0003994200355587614,
      "loss": 1.0637,
      "step": 76
    },
    {
      "epoch": 0.22,
      "learning_rate": 0.00039934764597309634,
      "loss": 1.1324,
      "step": 78
    },
    {
      "epoch": 0.23,
      "learning_rate": 0.0003992710077093466,
      "loss": 1.0891,
      "step": 80
    },
    {
      "epoch": 0.23,
      "learning_rate": 0.0003991901224008964,
      "loss": 1.0628,
      "step": 82
    },
    {
      "epoch": 0.24,
      "learning_rate": 0.00039910499177164686,
      "loss": 1.0904,
      "step": 84
    },
    {
      "epoch": 0.25,
      "learning_rate": 0.0003990156176359794,
      "loss": 1.0331,
      "step": 86
    },
    {
      "epoch": 0.25,
      "learning_rate": 0.00039892200189871666,
      "loss": 1.06,
      "step": 88
    },
    {
      "epoch": 0.26,
      "learning_rate": 0.00039882414655508235,
      "loss": 1.0381,
      "step": 90
    },
    {
      "epoch": 0.26,
      "learning_rate": 0.0003987220536906583,
      "loss": 1.0759,
      "step": 92
    },
    {
      "epoch": 0.27,
      "learning_rate": 0.0003986157254813405,
      "loss": 1.032,
      "step": 94
    },
    {
      "epoch": 0.27,
      "learning_rate": 0.00039850516419329227,
      "loss": 1.0713,
      "step": 96
    },
    {
      "epoch": 0.28,
      "learning_rate": 0.0003983903721828961,
      "loss": 1.063,
      "step": 98
    },
    {
      "epoch": 0.28,
      "learning_rate": 0.0003982713518967037,
      "loss": 1.0361,
      "step": 100
    },
    {
      "epoch": 0.29,
      "learning_rate": 0.00039814810587138346,
      "loss": 1.0341,
      "step": 102
    },
    {
      "epoch": 0.3,
      "learning_rate": 0.0003980206367336666,
      "loss": 1.01,
      "step": 104
    },
    {
      "epoch": 0.3,
      "learning_rate": 0.0003978889472002911,
      "loss": 1.0464,
      "step": 106
    },
    {
      "epoch": 0.31,
      "learning_rate": 0.0003977530400779441,
      "loss": 1.0334,
      "step": 108
    },
    {
      "epoch": 0.31,
      "learning_rate": 0.0003976129182632014,
      "loss": 1.0365,
      "step": 110
    },
    {
      "epoch": 0.32,
      "learning_rate": 0.00039746858474246647,
      "loss": 1.0019,
      "step": 112
    },
    {
      "epoch": 0.32,
      "learning_rate": 0.0003973200425919064,
      "loss": 1.0059,
      "step": 114
    },
    {
      "epoch": 0.33,
      "learning_rate": 0.0003971672949773864,
      "loss": 1.06,
      "step": 116
    },
    {
      "epoch": 0.34,
      "learning_rate": 0.00039701034515440234,
      "loss": 0.9708,
      "step": 118
    },
    {
      "epoch": 0.34,
      "learning_rate": 0.00039684919646801146,
      "loss": 0.9961,
      "step": 120
    },
    {
      "epoch": 0.35,
      "learning_rate": 0.00039668385235276085,
      "loss": 1.0084,
      "step": 122
    },
    {
      "epoch": 0.35,
      "learning_rate": 0.0003965143163326145,
      "loss": 0.982,
      "step": 124
    },
    {
      "epoch": 0.36,
      "learning_rate": 0.00039634059202087814,
      "loss": 1.0116,
      "step": 126
    },
    {
      "epoch": 0.36,
      "learning_rate": 0.0003961626831201219,
      "loss": 0.9486,
      "step": 128
    },
    {
      "epoch": 0.37,
      "learning_rate": 0.0003959805934221019,
      "loss": 0.9435,
      "step": 130
    },
    {
      "epoch": 0.38,
      "learning_rate": 0.0003957943268076793,
      "loss": 0.9654,
      "step": 132
    },
    {
      "epoch": 0.38,
      "learning_rate": 0.00039560388724673725,
      "loss": 0.9505,
      "step": 134
    },
    {
      "epoch": 0.39,
      "learning_rate": 0.00039540927879809664,
      "loss": 0.9705,
      "step": 136
    },
    {
      "epoch": 0.39,
      "learning_rate": 0.0003952105056094295,
      "loss": 0.9316,
      "step": 138
    },
    {
      "epoch": 0.4,
      "learning_rate": 0.00039500757191717053,
      "loss": 0.9281,
      "step": 140
    },
    {
      "epoch": 0.4,
      "learning_rate": 0.0003948004820464269,
      "loss": 0.9292,
      "step": 142
    },
    {
      "epoch": 0.41,
      "learning_rate": 0.00039458924041088606,
      "loss": 0.9307,
      "step": 144
    },
    {
      "epoch": 0.42,
      "learning_rate": 0.00039437385151272155,
      "loss": 0.9195,
      "step": 146
    },
    {
      "epoch": 0.42,
      "learning_rate": 0.0003941543199424972,
      "loss": 0.9232,
      "step": 148
    },
    {
      "epoch": 0.43,
      "learning_rate": 0.0003939306503790693,
      "loss": 0.9023,
      "step": 150
    },
    {
      "epoch": 0.43,
      "learning_rate": 0.00039370284758948667,
      "loss": 0.9184,
      "step": 152
    },
    {
      "epoch": 0.44,
      "learning_rate": 0.0003934709164288892,
      "loss": 0.8961,
      "step": 154
    },
    {
      "epoch": 0.44,
      "learning_rate": 0.0003932348618404045,
      "loss": 0.9495,
      "step": 156
    },
    {
      "epoch": 0.45,
      "learning_rate": 0.0003929946888550423,
      "loss": 0.9092,
      "step": 158
    },
    {
      "epoch": 0.46,
      "learning_rate": 0.0003927504025915875,
      "loss": 0.9123,
      "step": 160
    },
    {
      "epoch": 0.46,
      "learning_rate": 0.0003925020082564907,
      "loss": 0.8893,
      "step": 162
    },
    {
      "epoch": 0.47,
      "learning_rate": 0.00039224951114375764,
      "loss": 0.8835,
      "step": 164
    },
    {
      "epoch": 0.47,
      "learning_rate": 0.0003919929166348361,
      "loss": 0.9182,
      "step": 166
    },
    {
      "epoch": 0.48,
      "learning_rate": 0.00039173223019850126,
      "loss": 0.8915,
      "step": 168
    },
    {
      "epoch": 0.48,
      "learning_rate": 0.00039146745739073927,
      "loss": 0.9062,
      "step": 170
    },
    {
      "epoch": 0.49,
      "learning_rate": 0.00039119860385462867,
      "loss": 0.8893,
      "step": 172
    },
    {
      "epoch": 0.5,
      "learning_rate": 0.00039092567532022024,
      "loss": 0.8561,
      "step": 174
    },
    {
      "epoch": 0.5,
      "learning_rate": 0.00039064867760441464,
      "loss": 0.8803,
      "step": 176
    },
    {
      "epoch": 0.51,
      "learning_rate": 0.0003903676166108389,
      "loss": 0.874,
      "step": 178
    },
    {
      "epoch": 0.51,
      "learning_rate": 0.0003900824983297202,
      "loss": 0.852,
      "step": 180
    },
    {
      "epoch": 0.52,
      "learning_rate": 0.00038979332883775817,
      "loss": 0.8907,
      "step": 182
    },
    {
      "epoch": 0.52,
      "learning_rate": 0.00038950011429799574,
      "loss": 0.8538,
      "step": 184
    },
    {
      "epoch": 0.53,
      "learning_rate": 0.00038920286095968755,
      "loss": 0.8809,
      "step": 186
    },
    {
      "epoch": 0.54,
      "learning_rate": 0.0003889015751581666,
      "loss": 0.8811,
      "step": 188
    },
    {
      "epoch": 0.54,
      "learning_rate": 0.00038859626331470967,
      "loss": 0.8574,
      "step": 190
    },
    {
      "epoch": 0.55,
      "learning_rate": 0.0003882869319363999,
      "loss": 0.8522,
      "step": 192
    },
    {
      "epoch": 0.55,
      "learning_rate": 0.0003879735876159887,
      "loss": 0.889,
      "step": 194
    },
    {
      "epoch": 0.56,
      "learning_rate": 0.00038765623703175474,
      "loss": 0.8101,
      "step": 196
    },
    {
      "epoch": 0.56,
      "learning_rate": 0.0003873348869473619,
      "loss": 0.8479,
      "step": 198
    },
    {
      "epoch": 0.57,
      "learning_rate": 0.00038700954421171503,
      "loss": 0.797,
      "step": 200
    },
    {
      "epoch": 0.58,
      "learning_rate": 0.00038668021575881406,
      "loss": 0.7963,
      "step": 202
    },
    {
      "epoch": 0.58,
      "learning_rate": 0.00038634690860760607,
      "loss": 0.8253,
      "step": 204
    },
    {
      "epoch": 0.59,
      "learning_rate": 0.00038600962986183574,
      "loss": 0.8512,
      "step": 206
    },
    {
      "epoch": 0.59,
      "learning_rate": 0.00038566838670989407,
      "loss": 0.8082,
      "step": 208
    },
    {
      "epoch": 0.6,
      "learning_rate": 0.00038532318642466506,
      "loss": 0.7988,
      "step": 210
    },
    {
      "epoch": 0.6,
      "learning_rate": 0.0003849740363633706,
      "loss": 0.8149,
      "step": 212
    },
    {
      "epoch": 0.61,
      "learning_rate": 0.00038462094396741407,
      "loss": 0.8348,
      "step": 214
    },
    {
      "epoch": 0.62,
      "learning_rate": 0.0003842639167622212,
      "loss": 0.787,
      "step": 216
    },
    {
      "epoch": 0.62,
      "learning_rate": 0.00038390296235707997,
      "loss": 0.8334,
      "step": 218
    },
    {
      "epoch": 0.63,
      "learning_rate": 0.00038353808844497866,
      "loss": 0.8091,
      "step": 220
    },
    {
      "epoch": 0.63,
      "learning_rate": 0.0003831693028024414,
      "loss": 0.7977,
      "step": 222
    },
    {
      "epoch": 0.64,
      "learning_rate": 0.00038279661328936273,
      "loss": 0.7975,
      "step": 224
    },
    {
      "epoch": 0.64,
      "learning_rate": 0.00038242002784884006,
      "loss": 0.8279,
      "step": 226
    },
    {
      "epoch": 0.65,
      "learning_rate": 0.0003820395545070044,
      "loss": 0.8101,
      "step": 228
    },
    {
      "epoch": 0.66,
      "learning_rate": 0.000381655201372849,
      "loss": 0.8091,
      "step": 230
    },
    {
      "epoch": 0.66,
      "learning_rate": 0.00038126697663805705,
      "loss": 0.7846,
      "step": 232
    },
    {
      "epoch": 0.67,
      "learning_rate": 0.0003808748885768267,
      "loss": 0.7447,
      "step": 234
    },
    {
      "epoch": 0.67,
      "learning_rate": 0.0003804789455456948,
      "loss": 0.7934,
      "step": 236
    },
    {
      "epoch": 0.68,
      "learning_rate": 0.0003800791559833588,
      "loss": 0.7674,
      "step": 238
    },
    {
      "epoch": 0.68,
      "learning_rate": 0.000379675528410497,
      "loss": 0.8027,
      "step": 240
    },
    {
      "epoch": 0.69,
      "learning_rate": 0.0003792680714295868,
      "loss": 0.7721,
      "step": 242
    },
    {
      "epoch": 0.7,
      "learning_rate": 0.0003788567937247215,
      "loss": 0.7928,
      "step": 244
    },
    {
      "epoch": 0.7,
      "learning_rate": 0.0003784417040614249,
      "loss": 0.7528,
      "step": 246
    },
    {
      "epoch": 0.71,
      "learning_rate": 0.00037802281128646514,
      "loss": 0.7643,
      "step": 248
    },
    {
      "epoch": 0.71,
      "learning_rate": 0.0003776001243276653,
      "loss": 0.7411,
      "step": 250
    },
    {
      "epoch": 0.72,
      "learning_rate": 0.00037717365219371387,
      "loss": 0.7973,
      "step": 252
    },
    {
      "epoch": 0.72,
      "learning_rate": 0.00037674340397397224,
      "loss": 0.7675,
      "step": 254
    },
    {
      "epoch": 0.73,
      "learning_rate": 0.0003763093888382813,
      "loss": 0.7273,
      "step": 256
    },
    {
      "epoch": 0.74,
      "learning_rate": 0.00037587161603676577,
      "loss": 0.7177,
      "step": 258
    },
    {
      "epoch": 0.74,
      "learning_rate": 0.00037543009489963714,
      "loss": 0.7306,
      "step": 260
    },
    {
      "epoch": 0.75,
      "learning_rate": 0.0003749848348369949,
      "loss": 0.776,
      "step": 262
    },
    {
      "epoch": 0.75,
      "learning_rate": 0.000374535845338626,
      "loss": 0.7211,
      "step": 264
    },
    {
      "epoch": 0.76,
      "learning_rate": 0.0003740831359738023,
      "loss": 0.755,
      "step": 266
    },
    {
      "epoch": 0.76,
      "learning_rate": 0.00037362671639107705,
      "loss": 0.7184,
      "step": 268
    },
    {
      "epoch": 0.77,
      "learning_rate": 0.00037316659631807887,
      "loss": 0.7762,
      "step": 270
    },
    {
      "epoch": 0.77,
      "learning_rate": 0.00037270278556130463,
      "loss": 0.7511,
      "step": 272
    },
    {
      "epoch": 0.78,
      "learning_rate": 0.00037223529400591055,
      "loss": 0.6934,
      "step": 274
    },
    {
      "epoch": 0.79,
      "learning_rate": 0.000371764131615501,
      "loss": 0.734,
      "step": 276
    },
    {
      "epoch": 0.79,
      "learning_rate": 0.00037128930843191707,
      "loss": 0.6909,
      "step": 278
    },
    {
      "epoch": 0.8,
      "learning_rate": 0.00037081083457502146,
      "loss": 0.7328,
      "step": 280
    },
    {
      "epoch": 0.8,
      "learning_rate": 0.00037032872024248377,
      "loss": 0.7237,
      "step": 282
    },
    {
      "epoch": 0.81,
      "learning_rate": 0.0003698429757095623,
      "loss": 0.695,
      "step": 284
    },
    {
      "epoch": 0.81,
      "learning_rate": 0.0003693536113288859,
      "loss": 0.7213,
      "step": 286
    },
    {
      "epoch": 0.82,
      "learning_rate": 0.00036886063753023264,
      "loss": 0.7162,
      "step": 288
    },
    {
      "epoch": 0.83,
      "learning_rate": 0.0003683640648203078,
      "loss": 0.7244,
      "step": 290
    },
    {
      "epoch": 0.83,
      "learning_rate": 0.00036786390378252,
      "loss": 0.7237,
      "step": 292
    },
    {
      "epoch": 0.84,
      "learning_rate": 0.00036736016507675566,
      "loss": 0.7422,
      "step": 294
    },
    {
      "epoch": 0.84,
      "learning_rate": 0.00036685285943915144,
      "loss": 0.6849,
      "step": 296
    },
    {
      "epoch": 0.85,
      "learning_rate": 0.0003663419976818659,
      "loss": 0.6817,
      "step": 298
    },
    {
      "epoch": 0.85,
      "learning_rate": 0.0003658275906928488,
      "loss": 0.6784,
      "step": 300
    },
    {
      "epoch": 0.86,
      "learning_rate": 0.000365309649435609,
      "loss": 0.7205,
      "step": 302
    },
    {
      "epoch": 0.87,
      "learning_rate": 0.0003647881849489811,
      "loss": 0.7011,
      "step": 304
    },
    {
      "epoch": 0.87,
      "learning_rate": 0.0003642632083468898,
      "loss": 0.6943,
      "step": 306
    },
    {
      "epoch": 0.88,
      "learning_rate": 0.00036373473081811327,
      "loss": 0.7404,
      "step": 308
    },
    {
      "epoch": 0.88,
      "learning_rate": 0.00036320276362604453,
      "loss": 0.6986,
      "step": 310
    },
    {
      "epoch": 0.89,
      "learning_rate": 0.0003626673181084516,
      "loss": 0.6867,
      "step": 312
    },
    {
      "epoch": 0.89,
      "learning_rate": 0.00036212840567723554,
      "loss": 0.6806,
      "step": 314
    },
    {
      "epoch": 0.9,
      "learning_rate": 0.00036158603781818766,
      "loss": 0.6978,
      "step": 316
    },
    {
      "epoch": 0.91,
      "learning_rate": 0.00036104022609074435,
      "loss": 0.6866,
      "step": 318
    },
    {
      "epoch": 0.91,
      "learning_rate": 0.00036049098212774083,
      "loss": 0.6515,
      "step": 320
    },
    {
      "epoch": 0.92,
      "learning_rate": 0.0003599383176351633,
      "loss": 0.6801,
      "step": 322
    },
    {
      "epoch": 0.92,
      "learning_rate": 0.00035938224439189935,
      "loss": 0.6599,
      "step": 324
    },
    {
      "epoch": 0.93,
      "learning_rate": 0.000358822774249487,
      "loss": 0.6567,
      "step": 326
    },
    {
      "epoch": 0.93,
      "learning_rate": 0.0003582599191318621,
      "loss": 0.7001,
      "step": 328
    },
    {
      "epoch": 0.94,
      "learning_rate": 0.000357693691035104,
      "loss": 0.6653,
      "step": 330
    },
    {
      "epoch": 0.95,
      "learning_rate": 0.00035712410202718033,
      "loss": 0.6879,
      "step": 332
    },
    {
      "epoch": 0.95,
      "learning_rate": 0.00035655116424768916,
      "loss": 0.6308,
      "step": 334
    },
    {
      "epoch": 0.96,
      "learning_rate": 0.0003559748899076009,
      "loss": 0.6673,
      "step": 336
    },
    {
      "epoch": 0.96,
      "learning_rate": 0.0003553952912889978,
      "loss": 0.6634,
      "step": 338
    },
    {
      "epoch": 0.97,
      "learning_rate": 0.0003548123807448118,
      "loss": 0.6576,
      "step": 340
    },
    {
      "epoch": 0.97,
      "learning_rate": 0.00035422617069856197,
      "loss": 0.6552,
      "step": 342
    },
    {
      "epoch": 0.98,
      "learning_rate": 0.0003536366736440892,
      "loss": 0.6709,
      "step": 344
    },
    {
      "epoch": 0.99,
      "learning_rate": 0.00035304390214529015,
      "loss": 0.6434,
      "step": 346
    },
    {
      "epoch": 0.99,
      "learning_rate": 0.0003524478688358492,
      "loss": 0.6508,
      "step": 348
    },
    {
      "epoch": 1.0,
      "learning_rate": 0.0003518485864189699,
      "loss": 0.6753,
      "step": 350
    },
    {
      "epoch": 1.0,
      "learning_rate": 0.00035124606766710314,
      "loss": 0.6838,
      "step": 352
    },
    {
      "epoch": 1.01,
      "learning_rate": 0.00035064032542167587,
      "loss": 0.5915,
      "step": 354
    },
    {
      "epoch": 1.01,
      "learning_rate": 0.00035003137259281695,
      "loss": 0.6328,
      "step": 356
    },
    {
      "epoch": 1.02,
      "learning_rate": 0.0003494192221590822,
      "loss": 0.6565,
      "step": 358
    },
    {
      "epoch": 1.03,
      "learning_rate": 0.0003488038871671776,
      "loss": 0.656,
      "step": 360
    },
    {
      "epoch": 1.03,
      "learning_rate": 0.00034818538073168136,
      "loss": 0.6013,
      "step": 362
    },
    {
      "epoch": 1.04,
      "learning_rate": 0.0003475637160347645,
      "loss": 0.6271,
      "step": 364
    },
    {
      "epoch": 1.04,
      "learning_rate": 0.0003469389063259094,
      "loss": 0.6441,
      "step": 366
    },
    {
      "epoch": 1.05,
      "learning_rate": 0.0003463109649216282,
      "loss": 0.6319,
      "step": 368
    },
    {
      "epoch": 1.05,
      "learning_rate": 0.00034567990520517844,
      "loss": 0.6292,
      "step": 370
    },
    {
      "epoch": 1.06,
      "learning_rate": 0.0003450457406262779,
      "loss": 0.5924,
      "step": 372
    },
    {
      "epoch": 1.07,
      "learning_rate": 0.0003444084847008181,
      "loss": 0.635,
      "step": 374
    },
    {
      "epoch": 1.07,
      "learning_rate": 0.0003437681510105761,
      "loss": 0.6007,
      "step": 376
    },
    {
      "epoch": 1.08,
      "learning_rate": 0.000343124753202925,
      "loss": 0.6321,
      "step": 378
    },
    {
      "epoch": 1.08,
      "learning_rate": 0.00034247830499054336,
      "loss": 0.615,
      "step": 380
    },
    {
      "epoch": 1.09,
      "learning_rate": 0.00034182882015112244,
      "loss": 0.6328,
      "step": 382
    },
    {
      "epoch": 1.09,
      "learning_rate": 0.0003411763125270731,
      "loss": 0.5974,
      "step": 384
    },
    {
      "epoch": 1.1,
      "learning_rate": 0.0003405207960252305,
      "loss": 0.618,
      "step": 386
    },
    {
      "epoch": 1.11,
      "learning_rate": 0.00033986228461655766,
      "loss": 0.6112,
      "step": 388
    },
    {
      "epoch": 1.11,
      "learning_rate": 0.00033920079233584776,
      "loss": 0.6346,
      "step": 390
    },
    {
      "epoch": 1.12,
      "learning_rate": 0.00033853633328142515,
      "loss": 0.594,
      "step": 392
    },
    {
      "epoch": 1.12,
      "learning_rate": 0.00033786892161484456,
      "loss": 0.576,
      "step": 394
    },
    {
      "epoch": 1.13,
      "learning_rate": 0.0003371985715605897,
      "loss": 0.6082,
      "step": 396
    },
    {
      "epoch": 1.13,
      "learning_rate": 0.0003365252974057698,
      "loss": 0.6075,
      "step": 398
    },
    {
      "epoch": 1.14,
      "learning_rate": 0.00033584911349981495,
      "loss": 0.5957,
      "step": 400
    },
    {
      "epoch": 1.15,
      "learning_rate": 0.0003351700342541708,
      "loss": 0.5885,
      "step": 402
    },
    {
      "epoch": 1.15,
      "learning_rate": 0.0003344880741419911,
      "loss": 0.6087,
      "step": 404
    },
    {
      "epoch": 1.16,
      "learning_rate": 0.0003338032476978289,
      "loss": 0.5699,
      "step": 406
    },
    {
      "epoch": 1.16,
      "learning_rate": 0.00033311556951732754,
      "loss": 0.6346,
      "step": 408
    },
    {
      "epoch": 1.17,
      "learning_rate": 0.00033242505425690893,
      "loss": 0.5821,
      "step": 410
    },
    {
      "epoch": 1.17,
      "learning_rate": 0.0003317317166334615,
      "loss": 0.5686,
      "step": 412
    },
    {
      "epoch": 1.18,
      "learning_rate": 0.00033103557142402624,
      "loss": 0.5742,
      "step": 414
    },
    {
      "epoch": 1.19,
      "learning_rate": 0.0003303366334654822,
      "loss": 0.6053,
      "step": 416
    },
    {
      "epoch": 1.19,
      "learning_rate": 0.00032963491765423,
      "loss": 0.6033,
      "step": 418
    },
    {
      "epoch": 1.2,
      "learning_rate": 0.00032893043894587423,
      "loss": 0.5794,
      "step": 420
    },
    {
      "epoch": 1.2,
      "learning_rate": 0.000328223212354905,
      "loss": 0.6071,
      "step": 422
    },
    {
      "epoch": 1.21,
      "learning_rate": 0.0003275132529543778,
      "loss": 0.5719,
      "step": 424
    },
    {
      "epoch": 1.21,
      "learning_rate": 0.0003268005758755921,
      "loss": 0.5795,
      "step": 426
    },
    {
      "epoch": 1.22,
      "learning_rate": 0.00032608519630776913,
      "loss": 0.5689,
      "step": 428
    },
    {
      "epoch": 1.23,
      "learning_rate": 0.00032536712949772793,
      "loss": 0.5605,
      "step": 430
    },
    {
      "epoch": 1.23,
      "learning_rate": 0.0003246463907495605,
      "loss": 0.5789,
      "step": 432
    },
    {
      "epoch": 1.24,
      "learning_rate": 0.0003239229954243058,
      "loss": 0.6234,
      "step": 434
    },
    {
      "epoch": 1.24,
      "learning_rate": 0.0003231969589396218,
      "loss": 0.5473,
      "step": 436
    },
    {
      "epoch": 1.25,
      "learning_rate": 0.0003224682967694576,
      "loss": 0.5777,
      "step": 438
    },
    {
      "epoch": 1.25,
      "learning_rate": 0.0003217370244437231,
      "loss": 0.5794,
      "step": 440
    },
    {
      "epoch": 1.26,
      "learning_rate": 0.0003210031575479583,
      "loss": 0.5729,
      "step": 442
    },
    {
      "epoch": 1.26,
      "learning_rate": 0.000320266711723001,
      "loss": 0.5354,
      "step": 444
    },
    {
      "epoch": 1.27,
      "learning_rate": 0.00031952770266465346,
      "loss": 0.5975,
      "step": 446
    },
    {
      "epoch": 1.28,
      "learning_rate": 0.00031878614612334806,
      "loss": 0.5648,
      "step": 448
    },
    {
      "epoch": 1.28,
      "learning_rate": 0.00031804205790381113,
      "loss": 0.5529,
      "step": 450
    },
    {
      "epoch": 1.29,
      "learning_rate": 0.00031729545386472666,
      "loss": 0.5645,
      "step": 452
    },
    {
      "epoch": 1.29,
      "learning_rate": 0.00031654634991839804,
      "loss": 0.5496,
      "step": 454
    },
    {
      "epoch": 1.3,
      "learning_rate": 0.000315794762030409,
      "loss": 0.5262,
      "step": 456
    },
    {
      "epoch": 1.3,
      "learning_rate": 0.0003150407062192831,
      "loss": 0.5253,
      "step": 458
    },
    {
      "epoch": 1.31,
      "learning_rate": 0.0003142841985561427,
      "loss": 0.5519,
      "step": 460
    },
    {
      "epoch": 1.32,
      "learning_rate": 0.0003135252551643662,
      "loss": 0.5331,
      "step": 462
    },
    {
      "epoch": 1.32,
      "learning_rate": 0.0003127638922192445,
      "loss": 0.5431,
      "step": 464
    },
    {
      "epoch": 1.33,
      "learning_rate": 0.0003120001259476361,
      "loss": 0.5293,
      "step": 466
    },
    {
      "epoch": 1.33,
      "learning_rate": 0.00031123397262762157,
      "loss": 0.5695,
      "step": 468
    },
    {
      "epoch": 1.34,
      "learning_rate": 0.0003104654485881562,
      "loss": 0.5577,
      "step": 470
    },
    {
      "epoch": 1.34,
      "learning_rate": 0.0003096945702087223,
      "loss": 0.5517,
      "step": 472
    },
    {
      "epoch": 1.35,
      "learning_rate": 0.00030892135391898015,
      "loss": 0.5678,
      "step": 474
    },
    {
      "epoch": 1.36,
      "learning_rate": 0.0003081458161984175,
      "loss": 0.5548,
      "step": 476
    },
    {
      "epoch": 1.36,
      "learning_rate": 0.0003073679735759987,
      "loss": 0.5325,
      "step": 478
    },
    {
      "epoch": 1.37,
      "learning_rate": 0.000306587842629812,
      "loss": 0.5451,
      "step": 480
    },
    {
      "epoch": 1.37,
      "learning_rate": 0.00030580543998671685,
      "loss": 0.597,
      "step": 482
    },
    {
      "epoch": 1.38,
      "learning_rate": 0.00030502078232198885,
      "loss": 0.5763,
      "step": 484
    },
    {
      "epoch": 1.38,
      "learning_rate": 0.0003042338863589649,
      "loss": 0.5401,
      "step": 486
    },
    {
      "epoch": 1.39,
      "learning_rate": 0.00030344476886868643,
      "loss": 0.5923,
      "step": 488
    },
    {
      "epoch": 1.4,
      "learning_rate": 0.000302653446669542,
      "loss": 0.5462,
      "step": 490
    },
    {
      "epoch": 1.4,
      "learning_rate": 0.0003018599366269091,
      "loss": 0.5558,
      "step": 492
    },
    {
      "epoch": 1.41,
      "learning_rate": 0.0003010642556527945,
      "loss": 0.5426,
      "step": 494
    },
    {
      "epoch": 1.41,
      "learning_rate": 0.00030026642070547365,
      "loss": 0.5492,
      "step": 496
    },
    {
      "epoch": 1.42,
      "learning_rate": 0.0002994664487891297,
      "loss": 0.5124,
      "step": 498
    },
    {
      "epoch": 1.42,
      "learning_rate": 0.0002986643569534906,
      "loss": 0.5248,
      "step": 500
    },
    {
      "epoch": 1.43,
      "learning_rate": 0.0002978601622934661,
      "loss": 0.5365,
      "step": 502
    },
    {
      "epoch": 1.44,
      "learning_rate": 0.0002970538819487831,
      "loss": 0.5602,
      "step": 504
    },
    {
      "epoch": 1.44,
      "learning_rate": 0.00029624553310362065,
      "loss": 0.5596,
      "step": 506
    },
    {
      "epoch": 1.45,
      "learning_rate": 0.0002954351329862435,
      "loss": 0.5358,
      "step": 508
    },
    {
      "epoch": 1.45,
      "learning_rate": 0.000294622698868635,
      "loss": 0.5489,
      "step": 510
    },
    {
      "epoch": 1.46,
      "learning_rate": 0.00029380824806612895,
      "loss": 0.5386,
      "step": 512
    },
    {
      "epoch": 1.46,
      "learning_rate": 0.0002929917979370406,
      "loss": 0.47,
      "step": 514
    },
    {
      "epoch": 1.47,
      "learning_rate": 0.0002921733658822965,
      "loss": 0.533,
      "step": 516
    },
    {
      "epoch": 1.48,
      "learning_rate": 0.000291352969345064,
      "loss": 0.5462,
      "step": 518
    },
    {
      "epoch": 1.48,
      "learning_rate": 0.00029053062581037916,
      "loss": 0.5371,
      "step": 520
    },
    {
      "epoch": 1.49,
      "learning_rate": 0.00028970635280477436,
      "loss": 0.5198,
      "step": 522
    },
    {
      "epoch": 1.49,
      "learning_rate": 0.0002888801678959045,
      "loss": 0.537,
      "step": 524
    },
    {
      "epoch": 1.5,
      "learning_rate": 0.0002880520886921727,
      "loss": 0.5412,
      "step": 526
    },
    {
      "epoch": 1.5,
      "learning_rate": 0.000287222132842355,
      "loss": 0.5242,
      "step": 528
    },
    {
      "epoch": 1.51,
      "learning_rate": 0.0002863903180352244,
      "loss": 0.522,
      "step": 530
    },
    {
      "epoch": 1.52,
      "learning_rate": 0.0002855566619991733,
      "loss": 0.5238,
      "step": 532
    },
    {
      "epoch": 1.52,
      "learning_rate": 0.0002847211825018366,
      "loss": 0.5555,
      "step": 534
    },
    {
      "epoch": 1.53,
      "learning_rate": 0.00028388389734971197,
      "loss": 0.5228,
      "step": 536
    },
    {
      "epoch": 1.53,
      "learning_rate": 0.000283044824387781,
      "loss": 0.5319,
      "step": 538
    },
    {
      "epoch": 1.54,
      "learning_rate": 0.0002822039814991288,
      "loss": 0.5281,
      "step": 540
    },
    {
      "epoch": 1.54,
      "learning_rate": 0.00028136138660456265,
      "loss": 0.4977,
      "step": 542
    },
    {
      "epoch": 1.55,
      "learning_rate": 0.00028051705766223034,
      "loss": 0.4931,
      "step": 544
    },
    {
      "epoch": 1.56,
      "learning_rate": 0.00027967101266723694,
      "loss": 0.5028,
      "step": 546
    },
    {
      "epoch": 1.56,
      "learning_rate": 0.0002788232696512619,
      "loss": 0.5225,
      "step": 548
    },
    {
      "epoch": 1.57,
      "learning_rate": 0.00027797384668217434,
      "loss": 0.4941,
      "step": 550
    },
    {
      "epoch": 1.57,
      "learning_rate": 0.00027712276186364794,
      "loss": 0.5071,
      "step": 552
    },
    {
      "epoch": 1.58,
      "learning_rate": 0.00027627003333477543,
      "loss": 0.5207,
      "step": 554
    },
    {
      "epoch": 1.58,
      "learning_rate": 0.0002754156792696817,
      "loss": 0.5227,
      "step": 556
    },
    {
      "epoch": 1.59,
      "learning_rate": 0.0002745597178771365,
      "loss": 0.5112,
      "step": 558
    },
    {
      "epoch": 1.6,
      "learning_rate": 0.0002737021674001665,
      "loss": 0.5078,
      "step": 560
    },
    {
      "epoch": 1.6,
      "learning_rate": 0.0002728430461156662,
      "loss": 0.5128,
      "step": 562
    },
    {
      "epoch": 1.61,
      "learning_rate": 0.0002719823723340087,
      "loss": 0.513,
      "step": 564
    },
    {
      "epoch": 1.61,
      "learning_rate": 0.00027112016439865546,
      "loss": 0.5119,
      "step": 566
    },
    {
      "epoch": 1.62,
      "learning_rate": 0.00027025644068576503,
      "loss": 0.5165,
      "step": 568
    },
    {
      "epoch": 1.62,
      "learning_rate": 0.00026939121960380157,
      "loss": 0.5215,
      "step": 570
    },
    {
      "epoch": 1.63,
      "learning_rate": 0.0002685245195931427,
      "loss": 0.5127,
      "step": 572
    },
    {
      "epoch": 1.64,
      "learning_rate": 0.0002676563591256861,
      "loss": 0.5072,
      "step": 574
    },
    {
      "epoch": 1.64,
      "learning_rate": 0.0002667867567044562,
      "loss": 0.4924,
      "step": 576
    },
    {
      "epoch": 1.65,
      "learning_rate": 0.0002659157308632096,
      "loss": 0.4821,
      "step": 578
    },
    {
      "epoch": 1.65,
      "learning_rate": 0.00026504330016604015,
      "loss": 0.4862,
      "step": 580
    },
    {
      "epoch": 1.66,
      "learning_rate": 0.0002641694832069832,
      "loss": 0.4877,
      "step": 582
    },
    {
      "epoch": 1.66,
      "learning_rate": 0.00026329429860961947,
      "loss": 0.4702,
      "step": 584
    },
    {
      "epoch": 1.67,
      "learning_rate": 0.0002624177650266779,
      "loss": 0.5067,
      "step": 586
    },
    {
      "epoch": 1.68,
      "learning_rate": 0.00026153990113963826,
      "loss": 0.4797,
      "step": 588
    },
    {
      "epoch": 1.68,
      "learning_rate": 0.0002606607256583331,
      "loss": 0.5172,
      "step": 590
    },
    {
      "epoch": 1.69,
      "learning_rate": 0.00025978025732054875,
      "loss": 0.5014,
      "step": 592
    },
    {
      "epoch": 1.69,
      "learning_rate": 0.0002588985148916261,
      "loss": 0.4776,
      "step": 594
    },
    {
      "epoch": 1.7,
      "learning_rate": 0.0002580155171640606,
      "loss": 0.4959,
      "step": 596
    },
    {
      "epoch": 1.7,
      "learning_rate": 0.0002571312829571019,
      "loss": 0.457,
      "step": 598
    },
    {
      "epoch": 1.71,
      "learning_rate": 0.0002562458311163523,
      "loss": 0.5027,
      "step": 600
    },
    {
      "epoch": 1.72,
      "learning_rate": 0.00025535918051336574,
      "loss": 0.4833,
      "step": 602
    },
    {
      "epoch": 1.72,
      "learning_rate": 0.00025447135004524516,
      "loss": 0.4837,
      "step": 604
    },
    {
      "epoch": 1.73,
      "learning_rate": 0.00025358235863423984,
      "loss": 0.4873,
      "step": 606
    },
    {
      "epoch": 1.73,
      "learning_rate": 0.00025269222522734215,
      "loss": 0.478,
      "step": 608
    },
    {
      "epoch": 1.74,
      "learning_rate": 0.00025180096879588373,
      "loss": 0.509,
      "step": 610
    },
    {
      "epoch": 1.74,
      "learning_rate": 0.000250908608335131,
      "loss": 0.5005,
      "step": 612
    },
    {
      "epoch": 1.75,
      "learning_rate": 0.0002500151628638807,
      "loss": 0.4677,
      "step": 614
    },
    {
      "epoch": 1.75,
      "learning_rate": 0.0002491206514240541,
      "loss": 0.482,
      "step": 616
    },
    {
      "epoch": 1.76,
      "learning_rate": 0.0002482250930802915,
      "loss": 0.4808,
      "step": 618
    },
    {
      "epoch": 1.77,
      "learning_rate": 0.00024732850691954557,
      "loss": 0.4488,
      "step": 620
    },
    {
      "epoch": 1.77,
      "learning_rate": 0.00024643091205067496,
      "loss": 0.4671,
      "step": 622
    },
    {
      "epoch": 1.78,
      "learning_rate": 0.0002455323276040367,
      "loss": 0.4483,
      "step": 624
    },
    {
      "epoch": 1.78,
      "learning_rate": 0.0002446327727310787,
      "loss": 0.49,
      "step": 626
    },
    {
      "epoch": 1.79,
      "learning_rate": 0.00024373226660393134,
      "loss": 0.4837,
      "step": 628
    },
    {
      "epoch": 1.79,
      "learning_rate": 0.0002428308284149992,
      "loss": 0.4797,
      "step": 630
    },
    {
      "epoch": 1.8,
      "learning_rate": 0.00024192847737655152,
      "loss": 0.4832,
      "step": 632
    },
    {
      "epoch": 1.81,
      "learning_rate": 0.0002410252327203133,
      "loss": 0.4833,
      "step": 634
    },
    {
      "epoch": 1.81,
      "learning_rate": 0.00024012111369705488,
      "loss": 0.445,
      "step": 636
    },
    {
      "epoch": 1.82,
      "learning_rate": 0.00023921613957618215,
      "loss": 0.4898,
      "step": 638
    },
    {
      "epoch": 1.82,
      "learning_rate": 0.00023831032964532543,
      "loss": 0.4769,
      "step": 640
    },
    {
      "epoch": 1.83,
      "learning_rate": 0.00023740370320992865,
      "loss": 0.4595,
      "step": 642
    },
    {
      "epoch": 1.83,
      "learning_rate": 0.00023649627959283792,
      "loss": 0.4658,
      "step": 644
    },
    {
      "epoch": 1.84,
      "learning_rate": 0.00023558807813388937,
      "loss": 0.4783,
      "step": 646
    },
    {
      "epoch": 1.85,
      "learning_rate": 0.00023467911818949753,
      "loss": 0.4618,
      "step": 648
    },
    {
      "epoch": 1.85,
      "learning_rate": 0.00023376941913224206,
      "loss": 0.4526,
      "step": 650
    },
    {
      "epoch": 1.86,
      "learning_rate": 0.0002328590003504557,
      "loss": 0.4492,
      "step": 652
    },
    {
      "epoch": 1.86,
      "learning_rate": 0.00023194788124781027,
      "loss": 0.4576,
      "step": 654
    },
    {
      "epoch": 1.87,
      "learning_rate": 0.00023103608124290363,
      "loss": 0.4899,
      "step": 656
    },
    {
      "epoch": 1.87,
      "learning_rate": 0.00023012361976884553,
      "loss": 0.4477,
      "step": 658
    },
    {
      "epoch": 1.88,
      "learning_rate": 0.00022921051627284382,
      "loss": 0.4781,
      "step": 660
    },
    {
      "epoch": 1.89,
      "learning_rate": 0.00022829679021578935,
      "loss": 0.4776,
      "step": 662
    },
    {
      "epoch": 1.89,
      "learning_rate": 0.0002273824610718418,
      "loss": 0.4792,
      "step": 664
    },
    {
      "epoch": 1.9,
      "learning_rate": 0.00022646754832801426,
      "loss": 0.4764,
      "step": 666
    },
    {
      "epoch": 1.9,
      "learning_rate": 0.00022555207148375813,
      "loss": 0.4788,
      "step": 668
    },
    {
      "epoch": 1.91,
      "learning_rate": 0.0002246360500505473,
      "loss": 0.4535,
      "step": 670
    },
    {
      "epoch": 1.91,
      "learning_rate": 0.00022371950355146262,
      "loss": 0.47,
      "step": 672
    },
    {
      "epoch": 1.92,
      "learning_rate": 0.00022280245152077538,
      "loss": 0.4924,
      "step": 674
    },
    {
      "epoch": 1.93,
      "learning_rate": 0.00022188491350353152,
      "loss": 0.4373,
      "step": 676
    },
    {
      "epoch": 1.93,
      "learning_rate": 0.00022096690905513446,
      "loss": 0.4597,
      "step": 678
    },
    {
      "epoch": 1.94,
      "learning_rate": 0.00022004845774092882,
      "loss": 0.4471,
      "step": 680
    },
    {
      "epoch": 1.94,
      "learning_rate": 0.0002191295791357832,
      "loss": 0.4499,
      "step": 682
    },
    {
      "epoch": 1.95,
      "learning_rate": 0.00021821029282367298,
      "loss": 0.4552,
      "step": 684
    },
    {
      "epoch": 1.95,
      "learning_rate": 0.0002172906183972629,
      "loss": 0.4394,
      "step": 686
    },
    {
      "epoch": 1.96,
      "learning_rate": 0.0002163705754574897,
      "loss": 0.4647,
      "step": 688
    },
    {
      "epoch": 1.97,
      "learning_rate": 0.00021545018361314404,
      "loss": 0.4505,
      "step": 690
    },
    {
      "epoch": 1.97,
      "learning_rate": 0.00021452946248045284,
      "loss": 0.4663,
      "step": 692
    },
    {
      "epoch": 1.98,
      "learning_rate": 0.00021360843168266125,
      "loss": 0.4594,
      "step": 694
    },
    {
      "epoch": 1.98,
      "learning_rate": 0.00021268711084961397,
      "loss": 0.4636,
      "step": 696
    },
    {
      "epoch": 1.99,
      "learning_rate": 0.00021176551961733746,
      "loss": 0.4335,
      "step": 698
    },
    {
      "epoch": 1.99,
      "learning_rate": 0.00021084367762762106,
      "loss": 0.4706,
      "step": 700
    },
    {
      "epoch": 2.0,
      "learning_rate": 0.0002099216045275985,
      "loss": 0.4764,
      "step": 702
    },
    {
      "epoch": 2.01,
      "learning_rate": 0.0002089993199693291,
      "loss": 0.4501,
      "step": 704
    },
    {
      "epoch": 2.01,
      "learning_rate": 0.0002080768436093791,
      "loss": 0.4456,
      "step": 706
    },
    {
      "epoch": 2.02,
      "learning_rate": 0.00020715419510840234,
      "loss": 0.4476,
      "step": 708
    },
    {
      "epoch": 2.02,
      "learning_rate": 0.0002062313941307219,
      "loss": 0.4237,
      "step": 710
    },
    {
      "epoch": 2.03,
      "learning_rate": 0.00020530846034391012,
      "loss": 0.4244,
      "step": 712
    },
    {
      "epoch": 2.03,
      "learning_rate": 0.00020438541341837025,
      "loss": 0.4559,
      "step": 714
    },
    {
      "epoch": 2.04,
      "learning_rate": 0.00020346227302691657,
      "loss": 0.4291,
      "step": 716
    },
    {
      "epoch": 2.05,
      "learning_rate": 0.00020253905884435564,
      "loss": 0.4382,
      "step": 718
    },
    {
      "epoch": 2.05,
      "learning_rate": 0.00020161579054706654,
      "loss": 0.4504,
      "step": 720
    },
    {
      "epoch": 2.06,
      "learning_rate": 0.00020069248781258184,
      "loss": 0.4425,
      "step": 722
    },
    {
      "epoch": 2.06,
      "learning_rate": 0.00019976917031916784,
      "loss": 0.4178,
      "step": 724
    },
    {
      "epoch": 2.07,
      "learning_rate": 0.00019884585774540575,
      "loss": 0.4357,
      "step": 726
    },
    {
      "epoch": 2.07,
      "learning_rate": 0.00019792256976977154,
      "loss": 0.4217,
      "step": 728
    },
    {
      "epoch": 2.08,
      "learning_rate": 0.00019699932607021727,
      "loss": 0.4504,
      "step": 730
    },
    {
      "epoch": 2.09,
      "learning_rate": 0.00019607614632375102,
      "loss": 0.4306,
      "step": 732
    },
    {
      "epoch": 2.09,
      "learning_rate": 0.00019515305020601813,
      "loss": 0.4165,
      "step": 734
    },
    {
      "epoch": 2.1,
      "learning_rate": 0.00019423005739088137,
      "loss": 0.4169,
      "step": 736
    },
    {
      "epoch": 2.1,
      "learning_rate": 0.00019330718755000196,
      "loss": 0.4376,
      "step": 738
    },
    {
      "epoch": 2.11,
      "learning_rate": 0.0001923844603524201,
      "loss": 0.431,
      "step": 740
    },
    {
      "epoch": 2.11,
      "learning_rate": 0.0001914618954641359,
      "loss": 0.4529,
      "step": 742
    },
    {
      "epoch": 2.12,
      "learning_rate": 0.0001905395125476901,
      "loss": 0.4637,
      "step": 744
    },
    {
      "epoch": 2.13,
      "learning_rate": 0.00018961733126174522,
      "loss": 0.4561,
      "step": 746
    },
    {
      "epoch": 2.13,
      "learning_rate": 0.00018869537126066632,
      "loss": 0.4135,
      "step": 748
    },
    {
      "epoch": 2.14,
      "learning_rate": 0.0001877736521941023,
      "loss": 0.437,
      "step": 750
    },
    {
      "epoch": 2.14,
      "learning_rate": 0.00018685219370656714,
      "loss": 0.4294,
      "step": 752
    },
    {
      "epoch": 2.15,
      "learning_rate": 0.0001859310154370209,
      "loss": 0.4162,
      "step": 754
    },
    {
      "epoch": 2.15,
      "learning_rate": 0.0001850101370184516,
      "loss": 0.4272,
      "step": 756
    },
    {
      "epoch": 2.16,
      "learning_rate": 0.0001840895780774564,
      "loss": 0.4711,
      "step": 758
    },
    {
      "epoch": 2.17,
      "learning_rate": 0.0001831693582338236,
      "loss": 0.4131,
      "step": 760
    },
    {
      "epoch": 2.17,
      "learning_rate": 0.00018224949710011416,
      "loss": 0.4377,
      "step": 762
    },
    {
      "epoch": 2.18,
      "learning_rate": 0.00018133001428124416,
      "loss": 0.4124,
      "step": 764
    },
    {
      "epoch": 2.18,
      "learning_rate": 0.0001804109293740664,
      "loss": 0.4577,
      "step": 766
    },
    {
      "epoch": 2.19,
      "learning_rate": 0.00017949226196695327,
      "loss": 0.4371,
      "step": 768
    },
    {
      "epoch": 2.19,
      "learning_rate": 0.00017857403163937884,
      "loss": 0.4465,
      "step": 770
    },
    {
      "epoch": 2.2,
      "learning_rate": 0.00017765625796150184,
      "loss": 0.4421,
      "step": 772
    },
    {
      "epoch": 2.21,
      "learning_rate": 0.00017673896049374846,
      "loss": 0.4143,
      "step": 774
    },
    {
      "epoch": 2.21,
      "learning_rate": 0.00017582215878639542,
      "loss": 0.4397,
      "step": 776
    },
    {
      "epoch": 2.22,
      "learning_rate": 0.00017490587237915342,
      "loss": 0.3917,
      "step": 778
    },
    {
      "epoch": 2.22,
      "learning_rate": 0.0001739901208007505,
      "loss": 0.4459,
      "step": 780
    },
    {
      "epoch": 2.23,
      "learning_rate": 0.0001730749235685161,
      "loss": 0.4342,
      "step": 782
    },
    {
      "epoch": 2.23,
      "learning_rate": 0.00017216030018796471,
      "loss": 0.4459,
      "step": 784
    },
    {
      "epoch": 2.24,
      "learning_rate": 0.0001712462701523807,
      "loss": 0.3873,
      "step": 786
    },
    {
      "epoch": 2.25,
      "learning_rate": 0.0001703328529424022,
      "loss": 0.435,
      "step": 788
    },
    {
      "epoch": 2.25,
      "learning_rate": 0.00016942006802560643,
      "loss": 0.4163,
      "step": 790
    },
    {
      "epoch": 2.26,
      "learning_rate": 0.00016850793485609453,
      "loss": 0.4514,
      "step": 792
    },
    {
      "epoch": 2.26,
      "learning_rate": 0.00016759647287407708,
      "loss": 0.4189,
      "step": 794
    },
    {
      "epoch": 2.27,
      "learning_rate": 0.00016668570150545962,
      "loss": 0.4232,
      "step": 796
    },
    {
      "epoch": 2.27,
      "learning_rate": 0.00016577564016142875,
      "loss": 0.3958,
      "step": 798
    },
    {
      "epoch": 2.28,
      "learning_rate": 0.00016486630823803837,
      "loss": 0.4073,
      "step": 800
    },
    {
      "epoch": 2.28,
      "learning_rate": 0.0001639577251157964,
      "loss": 0.4277,
      "step": 802
    },
    {
      "epoch": 2.29,
      "learning_rate": 0.0001630499101592514,
      "loss": 0.415,
      "step": 804
    },
    {
      "epoch": 2.3,
      "learning_rate": 0.00016214288271658035,
      "loss": 0.4244,
      "step": 806
    },
    {
      "epoch": 2.3,
      "learning_rate": 0.00016123666211917582,
      "loss": 0.4209,
      "step": 808
    },
    {
      "epoch": 2.31,
      "learning_rate": 0.00016033126768123435,
      "loss": 0.4036,
      "step": 810
    },
    {
      "epoch": 2.31,
      "learning_rate": 0.00015942671869934437,
      "loss": 0.4185,
      "step": 812
    },
    {
      "epoch": 2.32,
      "learning_rate": 0.00015852303445207543,
      "loss": 0.4152,
      "step": 814
    },
    {
      "epoch": 2.32,
      "learning_rate": 0.0001576202341995668,
      "loss": 0.409,
      "step": 816
    },
    {
      "epoch": 2.33,
      "learning_rate": 0.00015671833718311766,
      "loss": 0.4182,
      "step": 818
    },
    {
      "epoch": 2.34,
      "learning_rate": 0.00015581736262477617,
      "loss": 0.4158,
      "step": 820
    },
    {
      "epoch": 2.34,
      "learning_rate": 0.0001549173297269305,
      "loss": 0.4292,
      "step": 822
    },
    {
      "epoch": 2.35,
      "learning_rate": 0.00015401825767189916,
      "loss": 0.4116,
      "step": 824
    },
    {
      "epoch": 2.35,
      "learning_rate": 0.0001531201656215224,
      "loss": 0.4074,
      "step": 826
    },
    {
      "epoch": 2.36,
      "learning_rate": 0.0001522230727167536,
      "loss": 0.419,
      "step": 828
    },
    {
      "epoch": 2.36,
      "learning_rate": 0.0001513269980772516,
      "loss": 0.4095,
      "step": 830
    },
    {
      "epoch": 2.37,
      "learning_rate": 0.00015043196080097285,
      "loss": 0.4093,
      "step": 832
    },
    {
      "epoch": 2.38,
      "learning_rate": 0.00014953797996376483,
      "loss": 0.4015,
      "step": 834
    },
    {
      "epoch": 2.38,
      "learning_rate": 0.00014864507461895894,
      "loss": 0.4379,
      "step": 836
    },
    {
      "epoch": 2.39,
      "learning_rate": 0.00014775326379696494,
      "loss": 0.4175,
      "step": 838
    },
    {
      "epoch": 2.39,
      "learning_rate": 0.00014686256650486498,
      "loss": 0.4092,
      "step": 840
    },
    {
      "epoch": 2.4,
      "learning_rate": 0.00014597300172600877,
      "loss": 0.4226,
      "step": 842
    },
    {
      "epoch": 2.4,
      "learning_rate": 0.00014508458841960877,
      "loss": 0.4073,
      "step": 844
    },
    {
      "epoch": 2.41,
      "learning_rate": 0.00014419734552033626,
      "loss": 0.4199,
      "step": 846
    },
    {
      "epoch": 2.42,
      "learning_rate": 0.0001433112919379177,
      "loss": 0.4296,
      "step": 848
    },
    {
      "epoch": 2.42,
      "learning_rate": 0.00014242644655673177,
      "loss": 0.417,
      "step": 850
    },
    {
      "epoch": 2.43,
      "learning_rate": 0.00014154282823540702,
      "loss": 0.3994,
      "step": 852
    },
    {
      "epoch": 2.43,
      "learning_rate": 0.0001406604558064195,
      "loss": 0.4364,
      "step": 854
    },
    {
      "epoch": 2.44,
      "learning_rate": 0.00013977934807569186,
      "loss": 0.4115,
      "step": 856
    },
    {
      "epoch": 2.44,
      "learning_rate": 0.0001388995238221923,
      "loss": 0.4242,
      "step": 858
    },
    {
      "epoch": 2.45,
      "learning_rate": 0.00013802100179753442,
      "loss": 0.4149,
      "step": 860
    },
    {
      "epoch": 2.46,
      "learning_rate": 0.00013714380072557745,
      "loss": 0.387,
      "step": 862
    },
    {
      "epoch": 2.46,
      "learning_rate": 0.0001362679393020274,
      "loss": 0.416,
      "step": 864
    },
    {
      "epoch": 2.47,
      "learning_rate": 0.00013539343619403836,
      "loss": 0.4054,
      "step": 866
    },
    {
      "epoch": 2.47,
      "learning_rate": 0.0001345203100398149,
      "loss": 0.4125,
      "step": 868
    },
    {
      "epoch": 2.48,
      "learning_rate": 0.0001336485794482145,
      "loss": 0.3986,
      "step": 870
    },
    {
      "epoch": 2.48,
      "learning_rate": 0.00013277826299835138,
      "loss": 0.3871,
      "step": 872
    },
    {
      "epoch": 2.49,
      "learning_rate": 0.00013190937923920006,
      "loss": 0.4413,
      "step": 874
    },
    {
      "epoch": 2.5,
      "learning_rate": 0.00013104194668920046,
      "loss": 0.3995,
      "step": 876
    },
    {
      "epoch": 2.5,
      "learning_rate": 0.00013017598383586285,
      "loss": 0.3851,
      "step": 878
    },
    {
      "epoch": 2.51,
      "learning_rate": 0.00012931150913537411,
      "loss": 0.4124,
      "step": 880
    },
    {
      "epoch": 2.51,
      "learning_rate": 0.00012844854101220414,
      "loss": 0.3702,
      "step": 882
    },
    {
      "epoch": 2.52,
      "learning_rate": 0.0001275870978587133,
      "loss": 0.3955,
      "step": 884
    },
    {
      "epoch": 2.52,
      "learning_rate": 0.00012672719803476066,
      "loss": 0.4327,
      "step": 886
    },
    {
      "epoch": 2.53,
      "learning_rate": 0.0001258688598673121,
      "loss": 0.4021,
      "step": 888
    },
    {
      "epoch": 2.54,
      "learning_rate": 0.00012501210165005028,
      "loss": 0.3967,
      "step": 890
    },
    {
      "epoch": 2.54,
      "learning_rate": 0.00012415694164298442,
      "loss": 0.4014,
      "step": 892
    },
    {
      "epoch": 2.55,
      "learning_rate": 0.00012330339807206133,
      "loss": 0.3862,
      "step": 894
    },
    {
      "epoch": 2.55,
      "learning_rate": 0.00012245148912877672,
      "loss": 0.4086,
      "step": 896
    },
    {
      "epoch": 2.56,
      "learning_rate": 0.00012160123296978782,
      "loss": 0.4086,
      "step": 898
    },
    {
      "epoch": 2.56,
      "learning_rate": 0.00012075264771652603,
      "loss": 0.4196,
      "step": 900
    },
    {
      "epoch": 2.57,
      "learning_rate": 0.00011990575145481107,
      "loss": 0.3967,
      "step": 902
    },
    {
      "epoch": 2.58,
      "learning_rate": 0.00011906056223446514,
      "loss": 0.4201,
      "step": 904
    },
    {
      "epoch": 2.58,
      "learning_rate": 0.00011821709806892863,
      "loss": 0.3791,
      "step": 906
    },
    {
      "epoch": 2.59,
      "learning_rate": 0.00011737537693487582,
      "loss": 0.3999,
      "step": 908
    },
    {
      "epoch": 2.59,
      "learning_rate": 0.00011653541677183199,
      "loss": 0.3837,
      "step": 910
    },
    {
      "epoch": 2.6,
      "learning_rate": 0.00011569723548179114,
      "loss": 0.3836,
      "step": 912
    },
    {
      "epoch": 2.6,
      "learning_rate": 0.00011486085092883409,
      "loss": 0.3927,
      "step": 914
    },
    {
      "epoch": 2.61,
      "learning_rate": 0.00011402628093874805,
      "loss": 0.4033,
      "step": 916
    },
    {
      "epoch": 2.62,
      "learning_rate": 0.00011319354329864653,
      "loss": 0.4194,
      "step": 918
    },
    {
      "epoch": 2.62,
      "learning_rate": 0.0001123626557565907,
      "loss": 0.42,
      "step": 920
    },
    {
      "epoch": 2.63,
      "learning_rate": 0.00011153363602121034,
      "loss": 0.3982,
      "step": 922
    },
    {
      "epoch": 2.63,
      "learning_rate": 0.00011070650176132702,
      "loss": 0.3783,
      "step": 924
    },
    {
      "epoch": 2.64,
      "learning_rate": 0.0001098812706055772,
      "loss": 0.3788,
      "step": 926
    },
    {
      "epoch": 2.64,
      "learning_rate": 0.00010905796014203696,
      "loss": 0.37,
      "step": 928
    },
    {
      "epoch": 2.65,
      "learning_rate": 0.00010823658791784657,
      "loss": 0.3754,
      "step": 930
    },
    {
      "epoch": 2.66,
      "learning_rate": 0.00010741717143883687,
      "loss": 0.3842,
      "step": 932
    },
    {
      "epoch": 2.66,
      "learning_rate": 0.00010659972816915598,
      "loss": 0.386,
      "step": 934
    },
    {
      "epoch": 2.67,
      "learning_rate": 0.00010578427553089747,
      "loss": 0.3629,
      "step": 936
    },
    {
      "epoch": 2.67,
      "learning_rate": 0.0001049708309037285,
      "loss": 0.434,
      "step": 938
    },
    {
      "epoch": 2.68,
      "learning_rate": 0.00010415941162451974,
      "loss": 0.3866,
      "step": 940
    },
    {
      "epoch": 2.68,
      "learning_rate": 0.00010335003498697582,
      "loss": 0.4052,
      "step": 942
    },
    {
      "epoch": 2.69,
      "learning_rate": 0.0001025427182412668,
      "loss": 0.3788,
      "step": 944
    },
    {
      "epoch": 2.7,
      "learning_rate": 0.00010173747859366037,
      "loss": 0.3999,
      "step": 946
    },
    {
      "epoch": 2.7,
      "learning_rate": 0.00010093433320615525,
      "loss": 0.3692,
      "step": 948
    },
    {
      "epoch": 2.71,
      "learning_rate": 0.00010013329919611526,
      "loss": 0.4155,
      "step": 950
    },
    {
      "epoch": 2.71,
      "learning_rate": 9.933439363590487e-05,
      "loss": 0.4225,
      "step": 952
    },
    {
      "epoch": 2.72,
      "learning_rate": 9.853763355252501e-05,
      "loss": 0.403,
      "step": 954
    },
    {
      "epoch": 2.72,
      "learning_rate": 9.774303592725018e-05,
      "loss": 0.3906,
      "step": 956
    },
    {
      "epoch": 2.73,
      "learning_rate": 9.695061769526664e-05,
      "loss": 0.3817,
      "step": 958
    },
    {
      "epoch": 2.74,
      "learning_rate": 9.616039574531133e-05,
      "loss": 0.3836,
      "step": 960
    },
    {
      "epoch": 2.74,
      "learning_rate": 9.537238691931234e-05,
      "loss": 0.377,
      "step": 962
    },
    {
      "epoch": 2.75,
      "learning_rate": 9.458660801202939e-05,
      "loss": 0.3961,
      "step": 964
    },
    {
      "epoch": 2.75,
      "learning_rate": 9.38030757706962e-05,
      "loss": 0.3856,
      "step": 966
    },
    {
      "epoch": 2.76,
      "learning_rate": 9.302180689466349e-05,
      "loss": 0.417,
      "step": 968
    },
    {
      "epoch": 2.76,
      "learning_rate": 9.224281803504322e-05,
      "loss": 0.4236,
      "step": 970
| }, | |
| { | |
| "epoch": 2.77, | |
| "learning_rate": 9.146612579435343e-05, | |
| "loss": 0.3682, | |
| "step": 972 | |
| }, | |
| { | |
| "epoch": 2.77, | |
| "learning_rate": 9.069174672616456e-05, | |
| "loss": 0.4029, | |
| "step": 974 | |
| }, | |
| { | |
| "epoch": 2.78, | |
| "learning_rate": 8.991969733474652e-05, | |
| "loss": 0.3887, | |
| "step": 976 | |
| }, | |
| { | |
| "epoch": 2.79, | |
| "learning_rate": 8.914999407471735e-05, | |
| "loss": 0.4017, | |
| "step": 978 | |
| }, | |
| { | |
| "epoch": 2.79, | |
| "learning_rate": 8.838265335069186e-05, | |
| "loss": 0.3597, | |
| "step": 980 | |
| }, | |
| { | |
| "epoch": 2.8, | |
| "learning_rate": 8.761769151693253e-05, | |
| "loss": 0.3692, | |
| "step": 982 | |
| }, | |
| { | |
| "epoch": 2.8, | |
| "learning_rate": 8.685512487700062e-05, | |
| "loss": 0.4036, | |
| "step": 984 | |
| }, | |
| { | |
| "epoch": 2.81, | |
| "learning_rate": 8.609496968340904e-05, | |
| "loss": 0.3759, | |
| "step": 986 | |
| }, | |
| { | |
| "epoch": 2.81, | |
| "learning_rate": 8.533724213727572e-05, | |
| "loss": 0.4027, | |
| "step": 988 | |
| }, | |
| { | |
| "epoch": 2.82, | |
| "learning_rate": 8.45819583879783e-05, | |
| "loss": 0.3934, | |
| "step": 990 | |
| }, | |
| { | |
| "epoch": 2.83, | |
| "learning_rate": 8.382913453280991e-05, | |
| "loss": 0.362, | |
| "step": 992 | |
| }, | |
| { | |
| "epoch": 2.83, | |
| "learning_rate": 8.307878661663632e-05, | |
| "loss": 0.4174, | |
| "step": 994 | |
| }, | |
| { | |
| "epoch": 2.84, | |
| "learning_rate": 8.233093063155392e-05, | |
| "loss": 0.3895, | |
| "step": 996 | |
| }, | |
| { | |
| "epoch": 2.84, | |
| "learning_rate": 8.158558251654861e-05, | |
| "loss": 0.3769, | |
| "step": 998 | |
| }, | |
| { | |
| "epoch": 2.85, | |
| "learning_rate": 8.084275815715637e-05, | |
| "loss": 0.4198, | |
| "step": 1000 | |
| }, | |
| { | |
| "epoch": 2.85, | |
| "learning_rate": 8.010247338512453e-05, | |
| "loss": 0.3912, | |
| "step": 1002 | |
| }, | |
| { | |
| "epoch": 2.86, | |
| "learning_rate": 7.936474397807463e-05, | |
| "loss": 0.3714, | |
| "step": 1004 | |
| }, | |
| { | |
| "epoch": 2.87, | |
| "learning_rate": 7.862958565916579e-05, | |
| "loss": 0.3935, | |
| "step": 1006 | |
| }, | |
| { | |
| "epoch": 2.87, | |
| "learning_rate": 7.789701409675976e-05, | |
| "loss": 0.3822, | |
| "step": 1008 | |
| }, | |
| { | |
| "epoch": 2.88, | |
| "learning_rate": 7.716704490408697e-05, | |
| "loss": 0.3716, | |
| "step": 1010 | |
| }, | |
| { | |
| "epoch": 2.88, | |
| "learning_rate": 7.643969363891397e-05, | |
| "loss": 0.3969, | |
| "step": 1012 | |
| }, | |
| { | |
| "epoch": 2.89, | |
| "learning_rate": 7.571497580321145e-05, | |
| "loss": 0.4048, | |
| "step": 1014 | |
| }, | |
| { | |
| "epoch": 2.89, | |
| "learning_rate": 7.499290684282414e-05, | |
| "loss": 0.4254, | |
| "step": 1016 | |
| }, | |
| { | |
| "epoch": 2.9, | |
| "learning_rate": 7.42735021471414e-05, | |
| "loss": 0.3768, | |
| "step": 1018 | |
| }, | |
| { | |
| "epoch": 2.91, | |
| "learning_rate": 7.355677704876976e-05, | |
| "loss": 0.3968, | |
| "step": 1020 | |
| }, | |
| { | |
| "epoch": 2.91, | |
| "learning_rate": 7.284274682320533e-05, | |
| "loss": 0.3742, | |
| "step": 1022 | |
| }, | |
| { | |
| "epoch": 2.92, | |
| "learning_rate": 7.213142668850876e-05, | |
| "loss": 0.3829, | |
| "step": 1024 | |
| }, | |
| { | |
| "epoch": 2.92, | |
| "learning_rate": 7.142283180498079e-05, | |
| "loss": 0.3943, | |
| "step": 1026 | |
| }, | |
| { | |
| "epoch": 2.93, | |
| "learning_rate": 7.071697727483924e-05, | |
| "loss": 0.3884, | |
| "step": 1028 | |
| }, | |
| { | |
| "epoch": 2.93, | |
| "learning_rate": 7.001387814189685e-05, | |
| "loss": 0.3574, | |
| "step": 1030 | |
| }, | |
| { | |
| "epoch": 2.94, | |
| "learning_rate": 6.931354939124097e-05, | |
| "loss": 0.3909, | |
| "step": 1032 | |
| }, | |
| { | |
| "epoch": 2.95, | |
| "learning_rate": 6.861600594891391e-05, | |
| "loss": 0.3953, | |
| "step": 1034 | |
| }, | |
| { | |
| "epoch": 2.95, | |
| "learning_rate": 6.792126268159516e-05, | |
| "loss": 0.3741, | |
| "step": 1036 | |
| }, | |
| { | |
| "epoch": 2.96, | |
| "learning_rate": 6.722933439628416e-05, | |
| "loss": 0.3807, | |
| "step": 1038 | |
| }, | |
| { | |
| "epoch": 2.96, | |
| "learning_rate": 6.654023583998495e-05, | |
| "loss": 0.3761, | |
| "step": 1040 | |
| }, | |
| { | |
| "epoch": 2.97, | |
| "learning_rate": 6.585398169939179e-05, | |
| "loss": 0.385, | |
| "step": 1042 | |
| }, | |
| { | |
| "epoch": 2.97, | |
| "learning_rate": 6.517058660057629e-05, | |
| "loss": 0.3625, | |
| "step": 1044 | |
| }, | |
| { | |
| "epoch": 2.98, | |
| "learning_rate": 6.449006510867544e-05, | |
| "loss": 0.3673, | |
| "step": 1046 | |
| }, | |
| { | |
| "epoch": 2.99, | |
| "learning_rate": 6.381243172758133e-05, | |
| "loss": 0.3793, | |
| "step": 1048 | |
| }, | |
| { | |
| "epoch": 2.99, | |
| "learning_rate": 6.313770089963196e-05, | |
| "loss": 0.4076, | |
| "step": 1050 | |
| }, | |
| { | |
| "epoch": 3.0, | |
| "learning_rate": 6.246588700530364e-05, | |
| "loss": 0.4015, | |
| "step": 1052 | |
| }, | |
| { | |
| "epoch": 3.0, | |
| "learning_rate": 6.179700436290428e-05, | |
| "loss": 0.387, | |
| "step": 1054 | |
| }, | |
| { | |
| "epoch": 3.01, | |
| "learning_rate": 6.113106722826822e-05, | |
| "loss": 0.3787, | |
| "step": 1056 | |
| }, | |
| { | |
| "epoch": 3.01, | |
| "learning_rate": 6.0468089794452463e-05, | |
| "loss": 0.3771, | |
| "step": 1058 | |
| }, | |
| { | |
| "epoch": 3.02, | |
| "learning_rate": 5.9808086191434185e-05, | |
| "loss": 0.3622, | |
| "step": 1060 | |
| }, | |
| { | |
| "epoch": 3.03, | |
| "learning_rate": 5.9151070485809704e-05, | |
| "loss": 0.3603, | |
| "step": 1062 | |
| }, | |
| { | |
| "epoch": 3.03, | |
| "learning_rate": 5.849705668049443e-05, | |
| "loss": 0.3642, | |
| "step": 1064 | |
| }, | |
| { | |
| "epoch": 3.04, | |
| "learning_rate": 5.7846058714424546e-05, | |
| "loss": 0.3696, | |
| "step": 1066 | |
| }, | |
| { | |
| "epoch": 3.04, | |
| "learning_rate": 5.7198090462259926e-05, | |
| "loss": 0.3658, | |
| "step": 1068 | |
| }, | |
| { | |
| "epoch": 3.05, | |
| "learning_rate": 5.6553165734088575e-05, | |
| "loss": 0.3847, | |
| "step": 1070 | |
| }, | |
| { | |
| "epoch": 3.05, | |
| "learning_rate": 5.591129827513197e-05, | |
| "loss": 0.3739, | |
| "step": 1072 | |
| }, | |
| { | |
| "epoch": 3.06, | |
| "learning_rate": 5.52725017654524e-05, | |
| "loss": 0.3892, | |
| "step": 1074 | |
| }, | |
| { | |
| "epoch": 3.07, | |
| "learning_rate": 5.463678981966114e-05, | |
| "loss": 0.3983, | |
| "step": 1076 | |
| }, | |
| { | |
| "epoch": 3.07, | |
| "learning_rate": 5.4004175986628705e-05, | |
| "loss": 0.3799, | |
| "step": 1078 | |
| }, | |
| { | |
| "epoch": 3.08, | |
| "learning_rate": 5.33746737491956e-05, | |
| "loss": 0.3927, | |
| "step": 1080 | |
| }, | |
| { | |
| "epoch": 3.08, | |
| "learning_rate": 5.274829652388524e-05, | |
| "loss": 0.3849, | |
| "step": 1082 | |
| }, | |
| { | |
| "epoch": 3.09, | |
| "learning_rate": 5.2125057660617905e-05, | |
| "loss": 0.3722, | |
| "step": 1084 | |
| }, | |
| { | |
| "epoch": 3.09, | |
| "learning_rate": 5.15049704424263e-05, | |
| "loss": 0.3493, | |
| "step": 1086 | |
| }, | |
| { | |
| "epoch": 3.1, | |
| "learning_rate": 5.088804808517253e-05, | |
| "loss": 0.3846, | |
| "step": 1088 | |
| }, | |
| { | |
| "epoch": 3.11, | |
| "learning_rate": 5.027430373726607e-05, | |
| "loss": 0.3681, | |
| "step": 1090 | |
| }, | |
| { | |
| "epoch": 3.11, | |
| "learning_rate": 4.9663750479383894e-05, | |
| "loss": 0.3685, | |
| "step": 1092 | |
| }, | |
| { | |
| "epoch": 3.12, | |
| "learning_rate": 4.9056401324191516e-05, | |
| "loss": 0.3818, | |
| "step": 1094 | |
| }, | |
| { | |
| "epoch": 3.12, | |
| "learning_rate": 4.845226921606587e-05, | |
| "loss": 0.3884, | |
| "step": 1096 | |
| }, | |
| { | |
| "epoch": 3.13, | |
| "learning_rate": 4.7851367030819026e-05, | |
| "loss": 0.3732, | |
| "step": 1098 | |
| }, | |
| { | |
| "epoch": 3.13, | |
| "learning_rate": 4.725370757542418e-05, | |
| "loss": 0.4064, | |
| "step": 1100 | |
| }, | |
| { | |
| "epoch": 3.14, | |
| "learning_rate": 4.6659303587742356e-05, | |
| "loss": 0.36, | |
| "step": 1102 | |
| }, | |
| { | |
| "epoch": 3.15, | |
| "learning_rate": 4.6068167736251335e-05, | |
| "loss": 0.3637, | |
| "step": 1104 | |
| }, | |
| { | |
| "epoch": 3.15, | |
| "learning_rate": 4.548031261977519e-05, | |
| "loss": 0.3608, | |
| "step": 1106 | |
| }, | |
| { | |
| "epoch": 3.16, | |
| "learning_rate": 4.4895750767216085e-05, | |
| "loss": 0.3946, | |
| "step": 1108 | |
| }, | |
| { | |
| "epoch": 3.16, | |
| "learning_rate": 4.431449463728707e-05, | |
| "loss": 0.3578, | |
| "step": 1110 | |
| }, | |
| { | |
| "epoch": 3.17, | |
| "learning_rate": 4.373655661824685e-05, | |
| "loss": 0.3779, | |
| "step": 1112 | |
| }, | |
| { | |
| "epoch": 3.17, | |
| "learning_rate": 4.316194902763531e-05, | |
| "loss": 0.3648, | |
| "step": 1114 | |
| }, | |
| { | |
| "epoch": 3.18, | |
| "learning_rate": 4.259068411201128e-05, | |
| "loss": 0.3845, | |
| "step": 1116 | |
| }, | |
| { | |
| "epoch": 3.19, | |
| "learning_rate": 4.202277404669144e-05, | |
| "loss": 0.373, | |
| "step": 1118 | |
| }, | |
| { | |
| "epoch": 3.19, | |
| "learning_rate": 4.1458230935490974e-05, | |
| "loss": 0.367, | |
| "step": 1120 | |
| }, | |
| { | |
| "epoch": 3.2, | |
| "learning_rate": 4.089706681046541e-05, | |
| "loss": 0.3856, | |
| "step": 1122 | |
| }, | |
| { | |
| "epoch": 3.2, | |
| "learning_rate": 4.033929363165425e-05, | |
| "loss": 0.3748, | |
| "step": 1124 | |
| }, | |
| { | |
| "epoch": 3.21, | |
| "learning_rate": 3.978492328682597e-05, | |
| "loss": 0.359, | |
| "step": 1126 | |
| }, | |
| { | |
| "epoch": 3.21, | |
| "learning_rate": 3.923396759122497e-05, | |
| "loss": 0.3932, | |
| "step": 1128 | |
| }, | |
| { | |
| "epoch": 3.22, | |
| "learning_rate": 3.868643828731942e-05, | |
| "loss": 0.3848, | |
| "step": 1130 | |
| }, | |
| { | |
| "epoch": 3.23, | |
| "learning_rate": 3.8142347044551044e-05, | |
| "loss": 0.3626, | |
| "step": 1132 | |
| }, | |
| { | |
| "epoch": 3.23, | |
| "learning_rate": 3.760170545908659e-05, | |
| "loss": 0.3665, | |
| "step": 1134 | |
| }, | |
| { | |
| "epoch": 3.24, | |
| "learning_rate": 3.706452505357063e-05, | |
| "loss": 0.3594, | |
| "step": 1136 | |
| }, | |
| { | |
| "epoch": 3.24, | |
| "learning_rate": 3.653081727687979e-05, | |
| "loss": 0.3892, | |
| "step": 1138 | |
| }, | |
| { | |
| "epoch": 3.25, | |
| "learning_rate": 3.6000593503878965e-05, | |
| "loss": 0.3906, | |
| "step": 1140 | |
| }, | |
| { | |
| "epoch": 3.25, | |
| "learning_rate": 3.547386503517869e-05, | |
| "loss": 0.3774, | |
| "step": 1142 | |
| }, | |
| { | |
| "epoch": 3.26, | |
| "learning_rate": 3.495064309689462e-05, | |
| "loss": 0.3849, | |
| "step": 1144 | |
| }, | |
| { | |
| "epoch": 3.26, | |
| "learning_rate": 3.44309388404078e-05, | |
| "loss": 0.3646, | |
| "step": 1146 | |
| }, | |
| { | |
| "epoch": 3.27, | |
| "learning_rate": 3.391476334212742e-05, | |
| "loss": 0.3791, | |
| "step": 1148 | |
| }, | |
| { | |
| "epoch": 3.28, | |
| "learning_rate": 3.340212760325441e-05, | |
| "loss": 0.3775, | |
| "step": 1150 | |
| }, | |
| { | |
| "epoch": 3.28, | |
| "learning_rate": 3.2893042549547305e-05, | |
| "loss": 0.3772, | |
| "step": 1152 | |
| }, | |
| { | |
| "epoch": 3.29, | |
| "learning_rate": 3.2387519031089185e-05, | |
| "loss": 0.3676, | |
| "step": 1154 | |
| }, | |
| { | |
| "epoch": 3.29, | |
| "learning_rate": 3.188556782205634e-05, | |
| "loss": 0.3717, | |
| "step": 1156 | |
| }, | |
| { | |
| "epoch": 3.3, | |
| "learning_rate": 3.1387199620488816e-05, | |
| "loss": 0.3529, | |
| "step": 1158 | |
| }, | |
| { | |
| "epoch": 3.3, | |
| "learning_rate": 3.0892425048062355e-05, | |
| "loss": 0.387, | |
| "step": 1160 | |
| }, | |
| { | |
| "epoch": 3.31, | |
| "learning_rate": 3.0401254649862075e-05, | |
| "loss": 0.3555, | |
| "step": 1162 | |
| }, | |
| { | |
| "epoch": 3.32, | |
| "learning_rate": 2.991369889415756e-05, | |
| "loss": 0.3747, | |
| "step": 1164 | |
| }, | |
| { | |
| "epoch": 3.32, | |
| "learning_rate": 2.9429768172179907e-05, | |
| "loss": 0.3653, | |
| "step": 1166 | |
| }, | |
| { | |
| "epoch": 3.33, | |
| "learning_rate": 2.89494727979001e-05, | |
| "loss": 0.3814, | |
| "step": 1168 | |
| }, | |
| { | |
| "epoch": 3.33, | |
| "learning_rate": 2.847282300780949e-05, | |
| "loss": 0.3917, | |
| "step": 1170 | |
| }, | |
| { | |
| "epoch": 3.34, | |
| "learning_rate": 2.7999828960701258e-05, | |
| "loss": 0.3891, | |
| "step": 1172 | |
| }, | |
| { | |
| "epoch": 3.34, | |
| "learning_rate": 2.7530500737454135e-05, | |
| "loss": 0.366, | |
| "step": 1174 | |
| }, | |
| { | |
| "epoch": 3.35, | |
| "learning_rate": 2.7064848340817418e-05, | |
| "loss": 0.3816, | |
| "step": 1176 | |
| }, | |
| { | |
| "epoch": 3.36, | |
| "learning_rate": 2.6602881695198002e-05, | |
| "loss": 0.3734, | |
| "step": 1178 | |
| }, | |
| { | |
| "epoch": 3.36, | |
| "learning_rate": 2.6144610646448553e-05, | |
| "loss": 0.3593, | |
| "step": 1180 | |
| }, | |
| { | |
| "epoch": 3.37, | |
| "learning_rate": 2.5690044961657877e-05, | |
| "loss": 0.3732, | |
| "step": 1182 | |
| }, | |
| { | |
| "epoch": 3.37, | |
| "learning_rate": 2.5239194328942684e-05, | |
| "loss": 0.3699, | |
| "step": 1184 | |
| }, | |
| { | |
| "epoch": 3.38, | |
| "learning_rate": 2.4792068357241148e-05, | |
| "loss": 0.3829, | |
| "step": 1186 | |
| }, | |
| { | |
| "epoch": 3.38, | |
| "learning_rate": 2.434867657610813e-05, | |
| "loss": 0.3397, | |
| "step": 1188 | |
| }, | |
| { | |
| "epoch": 3.39, | |
| "learning_rate": 2.3909028435511928e-05, | |
| "loss": 0.3451, | |
| "step": 1190 | |
| }, | |
| { | |
| "epoch": 3.4, | |
| "learning_rate": 2.3473133305633012e-05, | |
| "loss": 0.3498, | |
| "step": 1192 | |
| }, | |
| { | |
| "epoch": 3.4, | |
| "learning_rate": 2.3041000476664242e-05, | |
| "loss": 0.3585, | |
| "step": 1194 | |
| }, | |
| { | |
| "epoch": 3.41, | |
| "learning_rate": 2.2612639158613046e-05, | |
| "loss": 0.3637, | |
| "step": 1196 | |
| }, | |
| { | |
| "epoch": 3.41, | |
| "learning_rate": 2.218805848110481e-05, | |
| "loss": 0.3702, | |
| "step": 1198 | |
| }, | |
| { | |
| "epoch": 3.42, | |
| "learning_rate": 2.176726749318856e-05, | |
| "loss": 0.3695, | |
| "step": 1200 | |
| }, | |
| { | |
| "epoch": 3.42, | |
| "learning_rate": 2.1350275163143983e-05, | |
| "loss": 0.3509, | |
| "step": 1202 | |
| }, | |
| { | |
| "epoch": 3.43, | |
| "learning_rate": 2.0937090378290412e-05, | |
| "loss": 0.3992, | |
| "step": 1204 | |
| }, | |
| { | |
| "epoch": 3.44, | |
| "learning_rate": 2.0527721944797216e-05, | |
| "loss": 0.3609, | |
| "step": 1206 | |
| }, | |
| { | |
| "epoch": 3.44, | |
| "learning_rate": 2.0122178587496253e-05, | |
| "loss": 0.3842, | |
| "step": 1208 | |
| }, | |
| { | |
| "epoch": 3.45, | |
| "learning_rate": 1.9720468949695815e-05, | |
| "loss": 0.3618, | |
| "step": 1210 | |
| }, | |
| { | |
| "epoch": 3.45, | |
| "learning_rate": 1.932260159299666e-05, | |
| "loss": 0.3936, | |
| "step": 1212 | |
| }, | |
| { | |
| "epoch": 3.46, | |
| "learning_rate": 1.8928584997109188e-05, | |
| "loss": 0.3775, | |
| "step": 1214 | |
| }, | |
| { | |
| "epoch": 3.46, | |
| "learning_rate": 1.8538427559672923e-05, | |
| "loss": 0.3782, | |
| "step": 1216 | |
| }, | |
| { | |
| "epoch": 3.47, | |
| "learning_rate": 1.815213759607759e-05, | |
| "loss": 0.377, | |
| "step": 1218 | |
| }, | |
| { | |
| "epoch": 3.48, | |
| "learning_rate": 1.776972333928566e-05, | |
| "loss": 0.3679, | |
| "step": 1220 | |
| }, | |
| { | |
| "epoch": 3.48, | |
| "learning_rate": 1.7391192939657184e-05, | |
| "loss": 0.3905, | |
| "step": 1222 | |
| }, | |
| { | |
| "epoch": 3.49, | |
| "learning_rate": 1.701655446477577e-05, | |
| "loss": 0.3598, | |
| "step": 1224 | |
| }, | |
| { | |
| "epoch": 3.49, | |
| "learning_rate": 1.6645815899276852e-05, | |
| "loss": 0.3906, | |
| "step": 1226 | |
| }, | |
| { | |
| "epoch": 3.5, | |
| "learning_rate": 1.6278985144677493e-05, | |
| "loss": 0.3544, | |
| "step": 1228 | |
| }, | |
| { | |
| "epoch": 3.5, | |
| "learning_rate": 1.5916070019207874e-05, | |
| "loss": 0.3591, | |
| "step": 1230 | |
| }, | |
| { | |
| "epoch": 3.51, | |
| "learning_rate": 1.5557078257644763e-05, | |
| "loss": 0.3525, | |
| "step": 1232 | |
| }, | |
| { | |
| "epoch": 3.52, | |
| "learning_rate": 1.5202017511146605e-05, | |
| "loss": 0.3536, | |
| "step": 1234 | |
| }, | |
| { | |
| "epoch": 3.52, | |
| "learning_rate": 1.4850895347090588e-05, | |
| "loss": 0.3589, | |
| "step": 1236 | |
| }, | |
| { | |
| "epoch": 3.53, | |
| "learning_rate": 1.4503719248911074e-05, | |
| "loss": 0.3666, | |
| "step": 1238 | |
| }, | |
| { | |
| "epoch": 3.53, | |
| "learning_rate": 1.4160496615940455e-05, | |
| "loss": 0.3742, | |
| "step": 1240 | |
| }, | |
| { | |
| "epoch": 3.54, | |
| "learning_rate": 1.3821234763251145e-05, | |
| "loss": 0.3774, | |
| "step": 1242 | |
| }, | |
| { | |
| "epoch": 3.54, | |
| "learning_rate": 1.348594092149995e-05, | |
| "loss": 0.38, | |
| "step": 1244 | |
| }, | |
| { | |
| "epoch": 3.55, | |
| "learning_rate": 1.3154622236773706e-05, | |
| "loss": 0.3646, | |
| "step": 1246 | |
| }, | |
| { | |
| "epoch": 3.56, | |
| "learning_rate": 1.2827285770437148e-05, | |
| "loss": 0.3869, | |
| "step": 1248 | |
| }, | |
| { | |
| "epoch": 3.56, | |
| "learning_rate": 1.2503938498982304e-05, | |
| "loss": 0.3581, | |
| "step": 1250 | |
| }, | |
| { | |
| "epoch": 3.57, | |
| "learning_rate": 1.218458731387997e-05, | |
| "loss": 0.3802, | |
| "step": 1252 | |
| }, | |
| { | |
| "epoch": 3.57, | |
| "learning_rate": 1.1869239021432554e-05, | |
| "loss": 0.3986, | |
| "step": 1254 | |
| }, | |
| { | |
| "epoch": 3.58, | |
| "learning_rate": 1.1557900342629403e-05, | |
| "loss": 0.3718, | |
| "step": 1256 | |
| }, | |
| { | |
| "epoch": 3.58, | |
| "learning_rate": 1.1250577913003101e-05, | |
| "loss": 0.3724, | |
| "step": 1258 | |
| }, | |
| { | |
| "epoch": 3.59, | |
| "learning_rate": 1.0947278282488404e-05, | |
| "loss": 0.3783, | |
| "step": 1260 | |
| }, | |
| { | |
| "epoch": 3.6, | |
| "learning_rate": 1.0648007915282554e-05, | |
| "loss": 0.3928, | |
| "step": 1262 | |
| }, | |
| { | |
| "epoch": 3.6, | |
| "learning_rate": 1.0352773189707377e-05, | |
| "loss": 0.3598, | |
| "step": 1264 | |
| }, | |
| { | |
| "epoch": 3.61, | |
| "learning_rate": 1.00615803980735e-05, | |
| "loss": 0.3629, | |
| "step": 1266 | |
| }, | |
| { | |
| "epoch": 3.61, | |
| "learning_rate": 9.77443574654615e-06, | |
| "loss": 0.3759, | |
| "step": 1268 | |
| }, | |
| { | |
| "epoch": 3.62, | |
| "learning_rate": 9.49134535501297e-06, | |
| "loss": 0.3815, | |
| "step": 1270 | |
| }, | |
| { | |
| "epoch": 3.62, | |
| "learning_rate": 9.212315256953497e-06, | |
| "loss": 0.3566, | |
| "step": 1272 | |
| }, | |
| { | |
| "epoch": 3.63, | |
| "learning_rate": 8.937351399310557e-06, | |
| "loss": 0.3885, | |
| "step": 1274 | |
| }, | |
| { | |
| "epoch": 3.64, | |
| "learning_rate": 8.666459642363633e-06, | |
| "loss": 0.3698, | |
| "step": 1276 | |
| }, | |
| { | |
| "epoch": 3.64, | |
| "learning_rate": 8.399645759603924e-06, | |
| "loss": 0.3671, | |
| "step": 1278 | |
| }, | |
| { | |
| "epoch": 3.65, | |
| "learning_rate": 8.136915437611193e-06, | |
| "loss": 0.3572, | |
| "step": 1280 | |
| }, | |
| { | |
| "epoch": 3.65, | |
| "learning_rate": 7.878274275932662e-06, | |
| "loss": 0.3812, | |
| "step": 1282 | |
| }, | |
| { | |
| "epoch": 3.66, | |
| "learning_rate": 7.623727786963697e-06, | |
| "loss": 0.3933, | |
| "step": 1284 | |
| }, | |
| { | |
| "epoch": 3.66, | |
| "learning_rate": 7.373281395830289e-06, | |
| "loss": 0.3667, | |
| "step": 1286 | |
| }, | |
| { | |
| "epoch": 3.67, | |
| "learning_rate": 7.126940440273311e-06, | |
| "loss": 0.3808, | |
| "step": 1288 | |
| }, | |
| { | |
| "epoch": 3.68, | |
| "learning_rate": 6.884710170535025e-06, | |
| "loss": 0.3645, | |
| "step": 1290 | |
| }, | |
| { | |
| "epoch": 3.68, | |
| "learning_rate": 6.646595749246909e-06, | |
| "loss": 0.3694, | |
| "step": 1292 | |
| }, | |
| { | |
| "epoch": 3.69, | |
| "learning_rate": 6.412602251319744e-06, | |
| "loss": 0.3808, | |
| "step": 1294 | |
| }, | |
| { | |
| "epoch": 3.69, | |
| "learning_rate": 6.182734663835521e-06, | |
| "loss": 0.3807, | |
| "step": 1296 | |
| }, | |
| { | |
| "epoch": 3.7, | |
| "learning_rate": 5.956997885941018e-06, | |
| "loss": 0.3421, | |
| "step": 1298 | |
| }, | |
| { | |
| "epoch": 3.7, | |
| "learning_rate": 5.735396728743481e-06, | |
| "loss": 0.3903, | |
| "step": 1300 | |
| }, | |
| { | |
| "epoch": 3.71, | |
| "learning_rate": 5.517935915207995e-06, | |
| "loss": 0.3798, | |
| "step": 1302 | |
| }, | |
| { | |
| "epoch": 3.72, | |
| "learning_rate": 5.3046200800569655e-06, | |
| "loss": 0.3535, | |
| "step": 1304 | |
| }, | |
| { | |
| "epoch": 3.72, | |
| "learning_rate": 5.095453769671199e-06, | |
| "loss": 0.3873, | |
| "step": 1306 | |
| }, | |
| { | |
| "epoch": 3.73, | |
| "learning_rate": 4.890441441993087e-06, | |
| "loss": 0.352, | |
| "step": 1308 | |
| }, | |
| { | |
| "epoch": 3.73, | |
| "learning_rate": 4.689587466431511e-06, | |
| "loss": 0.3501, | |
| "step": 1310 | |
| }, | |
| { | |
| "epoch": 3.74, | |
| "learning_rate": 4.492896123768864e-06, | |
| "loss": 0.3694, | |
| "step": 1312 | |
| }, | |
| { | |
| "epoch": 3.74, | |
| "learning_rate": 4.300371606069686e-06, | |
| "loss": 0.3873, | |
| "step": 1314 | |
| }, | |
| { | |
| "epoch": 3.75, | |
| "learning_rate": 4.112018016591313e-06, | |
| "loss": 0.3556, | |
| "step": 1316 | |
| }, | |
| { | |
| "epoch": 3.75, | |
| "learning_rate": 3.92783936969654e-06, | |
| "loss": 0.3664, | |
| "step": 1318 | |
| }, | |
| { | |
| "epoch": 3.76, | |
| "learning_rate": 3.747839590767965e-06, | |
| "loss": 0.3754, | |
| "step": 1320 | |
| }, | |
| { | |
| "epoch": 3.77, | |
| "learning_rate": 3.5720225161243624e-06, | |
| "loss": 0.3835, | |
| "step": 1322 | |
| }, | |
| { | |
| "epoch": 3.77, | |
| "learning_rate": 3.4003918929389033e-06, | |
| "loss": 0.3751, | |
| "step": 1324 | |
| }, | |
| { | |
| "epoch": 3.78, | |
| "learning_rate": 3.232951379159288e-06, | |
| "loss": 0.3577, | |
| "step": 1326 | |
| }, | |
| { | |
| "epoch": 3.78, | |
| "learning_rate": 3.0697045434298744e-06, | |
| "loss": 0.3515, | |
| "step": 1328 | |
| }, | |
| { | |
| "epoch": 3.79, | |
| "learning_rate": 2.91065486501545e-06, | |
| "loss": 0.3856, | |
| "step": 1330 | |
| }, | |
| { | |
| "epoch": 3.79, | |
| "learning_rate": 2.7558057337272234e-06, | |
| "loss": 0.3807, | |
| "step": 1332 | |
| }, | |
| { | |
| "epoch": 3.8, | |
| "learning_rate": 2.605160449850508e-06, | |
| "loss": 0.369, | |
| "step": 1334 | |
| }, | |
| { | |
| "epoch": 3.81, | |
| "learning_rate": 2.458722224074439e-06, | |
| "loss": 0.3743, | |
| "step": 1336 | |
| }, | |
| { | |
| "epoch": 3.81, | |
| "learning_rate": 2.316494177423456e-06, | |
| "loss": 0.359, | |
| "step": 1338 | |
| }, | |
| { | |
| "epoch": 3.82, | |
| "learning_rate": 2.178479341190909e-06, | |
| "loss": 0.3892, | |
| "step": 1340 | |
| }, | |
| { | |
| "epoch": 3.82, | |
| "learning_rate": 2.0446806568742873e-06, | |
| "loss": 0.3819, | |
| "step": 1342 | |
| }, | |
| { | |
| "epoch": 3.83, | |
| "learning_rate": 1.9151009761126937e-06, | |
| "loss": 0.4031, | |
| "step": 1344 | |
| }, | |
| { | |
| "epoch": 3.83, | |
| "learning_rate": 1.7897430606259812e-06, | |
| "loss": 0.3721, | |
| "step": 1346 | |
| }, | |
| { | |
| "epoch": 3.84, | |
| "learning_rate": 1.6686095821558888e-06, | |
| "loss": 0.3928, | |
| "step": 1348 | |
| }, | |
| { | |
| "epoch": 3.85, | |
| "learning_rate": 1.5517031224091095e-06, | |
| "loss": 0.3874, | |
| "step": 1350 | |
| }, | |
| { | |
| "epoch": 3.85, | |
| "learning_rate": 1.4390261730023337e-06, | |
| "loss": 0.3787, | |
| "step": 1352 | |
| }, | |
| { | |
| "epoch": 3.86, | |
| "learning_rate": 1.3305811354090037e-06, | |
| "loss": 0.3714, | |
| "step": 1354 | |
| }, | |
| { | |
| "epoch": 3.86, | |
| "learning_rate": 1.2263703209082878e-06, | |
| "loss": 0.3527, | |
| "step": 1356 | |
| }, | |
| { | |
| "epoch": 3.87, | |
| "learning_rate": 1.1263959505356747e-06, | |
| "loss": 0.3698, | |
| "step": 1358 | |
| }, | |
| { | |
| "epoch": 3.87, | |
| "learning_rate": 1.0306601550357676e-06, | |
| "loss": 0.3914, | |
| "step": 1360 | |
| }, | |
| { | |
| "epoch": 3.88, | |
| "learning_rate": 9.391649748167863e-07, | |
| "loss": 0.3822, | |
| "step": 1362 | |
| }, | |
| { | |
| "epoch": 3.89, | |
| "learning_rate": 8.519123599071366e-07, | |
| "loss": 0.3616, | |
| "step": 1364 | |
| }, | |
| { | |
| "epoch": 3.89, | |
| "learning_rate": 7.689041699137534e-07, | |
| "loss": 0.3453, | |
| "step": 1366 | |
| }, | |
| { | |
| "epoch": 3.9, | |
| "learning_rate": 6.901421739825775e-07, | |
| "loss": 0.3632, | |
| "step": 1368 | |
| }, | |
| { | |
| "epoch": 3.9, | |
| "learning_rate": 6.15628050760808e-07, | |
| "loss": 0.3649, | |
| "step": 1370 | |
| }, | |
| { | |
| "epoch": 3.91, | |
| "learning_rate": 5.453633883611309e-07, | |
| "loss": 0.3617, | |
| "step": 1372 | |
| }, | |
| { | |
| "epoch": 3.91, | |
| "learning_rate": 4.793496843278345e-07, | |
| "loss": 0.3749, | |
| "step": 1374 | |
| }, | |
| { | |
| "epoch": 3.92, | |
| "learning_rate": 4.1758834560492457e-07, | |
| "loss": 0.3972, | |
| "step": 1376 | |
| }, | |
| { | |
| "epoch": 3.93, | |
| "learning_rate": 3.6008068850617025e-07, | |
| "loss": 0.3565, | |
| "step": 1378 | |
| }, | |
| { | |
| "epoch": 3.93, | |
| "learning_rate": 3.06827938686971e-07, | |
| "loss": 0.3714, | |
| "step": 1380 | |
| }, | |
| { | |
| "epoch": 3.94, | |
| "learning_rate": 2.5783123111828846e-07, | |
| "loss": 0.3537, | |
| "step": 1382 | |
| }, | |
| { | |
| "epoch": 3.94, | |
| "learning_rate": 2.13091610062488e-07, | |
| "loss": 0.3692, | |
| "step": 1384 | |
| }, | |
| { | |
| "epoch": 3.95, | |
| "learning_rate": 1.726100290509791e-07, | |
| "loss": 0.3682, | |
| "step": 1386 | |
| }, | |
| { | |
| "epoch": 3.95, | |
| "learning_rate": 1.363873508639646e-07, | |
| "loss": 0.3834, | |
| "step": 1388 | |
| }, | |
| { | |
| "epoch": 3.96, | |
| "learning_rate": 1.0442434751212205e-07, | |
| "loss": 0.3683, | |
| "step": 1390 | |
| }, | |
| { | |
| "epoch": 3.97, | |
| "learning_rate": 7.672170021995052e-08, | |
| "loss": 0.356, | |
| "step": 1392 | |
| }, | |
| { | |
| "epoch": 3.97, | |
| "learning_rate": 5.327999941147077e-08, | |
| "loss": 0.3902, | |
| "step": 1394 | |
| }, | |
| { | |
| "epoch": 3.98, | |
| "learning_rate": 3.40997446974578e-08, | |
| "loss": 0.3715, | |
| "step": 1396 | |
| }, | |
| { | |
| "epoch": 3.98, | |
| "learning_rate": 1.918134486496026e-08, | |
| "loss": 0.3553, | |
| "step": 1398 | |
| }, | |
| { | |
| "epoch": 3.99, | |
| "learning_rate": 8.525117868463105e-09, | |
| "loss": 0.3679, | |
| "step": 1400 | |
| }, | |
| { | |
| "epoch": 3.99, | |
| "learning_rate": 2.1312908231152506e-09, | |
| "loss": 0.3738, | |
| "step": 1402 | |
| }, | |
| { | |
| "epoch": 4.0, | |
| "learning_rate": 0.0, | |
| "loss": 0.3884, | |
| "step": 1404 | |
| }, | |
| { | |
| "epoch": 4.0, | |
| "step": 1404, | |
| "total_flos": 2.6724511228991898e+17, | |
| "train_loss": 0.5554745374751566, | |
| "train_runtime": 66017.7666, | |
| "train_samples_per_second": 10.888, | |
| "train_steps_per_second": 0.021 | |
| } | |
| ], | |
| "max_steps": 1404, | |
| "num_train_epochs": 4, | |
| "total_flos": 2.6724511228991898e+17, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
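
The record above follows the usual `trainer_state.json` layout written by the Hugging Face `Trainer`: a `log_history` array of per-step entries (each with `epoch`, `learning_rate`, `loss`, `step`) followed by a final summary entry carrying the aggregate statistics (`train_loss`, `train_runtime`, `train_samples_per_second`, `train_steps_per_second`). Below is a minimal sketch of how one might load this file and sanity-check the summary figures; the filename `trainer_state.json` is an assumption, and the script uses only the field names visible in the log itself.

```python
# A minimal sketch, assuming the JSON above is saved as trainer_state.json
# (the path is hypothetical; field names follow the log shown above).
import json

with open("trainer_state.json") as f:
    state = json.load(f)

# Per-step entries carry a "learning_rate"; the trailing summary entry
# carries aggregate stats instead, so split on that key.
logs = [e for e in state["log_history"] if "learning_rate" in e]
summary = state["log_history"][-1]

steps = [e["step"] for e in logs]
losses = [e["loss"] for e in logs]

print(f"logged points: {len(logs)}")
print(f"loss: {losses[0]:.4f} at step {steps[0]} "
      f"-> {losses[-1]:.4f} at step {steps[-1]}")

# Cross-check the reported throughput against the raw counters:
# 1404 steps / 66017.7666 s ~= 0.0213, matching the rounded
# train_steps_per_second of 0.021 in the summary entry.
print(summary["step"] / summary["train_runtime"])
```

The same consistency holds elsewhere in the summary: `max_steps` equals the final `step` (1404), the last logged `learning_rate` is exactly 0.0 as expected at the end of a decay schedule, and `train_loss` (0.5555) is the average over all logged steps rather than the final-step loss (0.3884).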