{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.0,
  "eval_steps": 106,
  "global_step": 848,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0023584905660377358,
      "grad_norm": 5.1080776865120034,
      "learning_rate": 0.0,
      "loss": 0.9592,
      "step": 1
    },
    {
      "epoch": 0.0023584905660377358,
      "eval_loss": 0.9177566170692444,
      "eval_runtime": 83.3901,
      "eval_samples_per_second": 8.454,
      "eval_steps_per_second": 0.54,
      "step": 1
    },
    {
      "epoch": 0.0047169811320754715,
      "grad_norm": 5.216656877082355,
      "learning_rate": 4e-08,
      "loss": 0.9643,
      "step": 2
    },
    {
      "epoch": 0.007075471698113208,
      "grad_norm": 5.86435572061478,
      "learning_rate": 8e-08,
      "loss": 0.9448,
      "step": 3
    },
    {
      "epoch": 0.009433962264150943,
      "grad_norm": 4.885606218308886,
      "learning_rate": 1.2e-07,
      "loss": 0.9499,
      "step": 4
    },
    {
      "epoch": 0.01179245283018868,
      "grad_norm": 4.863782725318177,
      "learning_rate": 1.6e-07,
      "loss": 0.9547,
      "step": 5
    },
    {
      "epoch": 0.014150943396226415,
      "grad_norm": 4.84448661904324,
      "learning_rate": 2e-07,
      "loss": 0.9378,
      "step": 6
    },
    {
      "epoch": 0.01650943396226415,
      "grad_norm": 4.594764533947918,
      "learning_rate": 2.4e-07,
      "loss": 0.967,
      "step": 7
    },
    {
      "epoch": 0.018867924528301886,
      "grad_norm": 5.311507883731841,
      "learning_rate": 2.8e-07,
      "loss": 0.9534,
      "step": 8
    },
    {
      "epoch": 0.02122641509433962,
      "grad_norm": 4.780331285112587,
      "learning_rate": 3.2e-07,
      "loss": 0.979,
      "step": 9
    },
    {
      "epoch": 0.02358490566037736,
      "grad_norm": 5.172814213529754,
      "learning_rate": 3.6e-07,
      "loss": 0.9284,
      "step": 10
    },
    {
      "epoch": 0.025943396226415096,
      "grad_norm": 4.693805464386623,
      "learning_rate": 4e-07,
      "loss": 0.9452,
      "step": 11
    },
    {
      "epoch": 0.02830188679245283,
      "grad_norm": 5.378045080991313,
      "learning_rate": 4.3999999999999997e-07,
      "loss": 0.9575,
      "step": 12
    },
    {
      "epoch": 0.030660377358490566,
      "grad_norm": 4.718804488820069,
      "learning_rate": 4.8e-07,
      "loss": 0.9601,
      "step": 13
    },
    {
      "epoch": 0.0330188679245283,
      "grad_norm": 4.8688696728838785,
      "learning_rate": 5.2e-07,
      "loss": 0.9578,
      "step": 14
    },
    {
      "epoch": 0.03537735849056604,
      "grad_norm": 4.41639309685216,
      "learning_rate": 5.6e-07,
      "loss": 0.9433,
      "step": 15
    },
    {
      "epoch": 0.03773584905660377,
      "grad_norm": 4.330783791521376,
      "learning_rate": 6e-07,
      "loss": 0.9498,
      "step": 16
    },
    {
      "epoch": 0.04009433962264151,
      "grad_norm": 4.580748421983631,
      "learning_rate": 6.4e-07,
      "loss": 0.9607,
      "step": 17
    },
    {
      "epoch": 0.04245283018867924,
      "grad_norm": 3.895307917106911,
      "learning_rate": 6.800000000000001e-07,
      "loss": 0.9561,
      "step": 18
    },
    {
      "epoch": 0.04481132075471698,
      "grad_norm": 3.9063376740559894,
      "learning_rate": 7.2e-07,
      "loss": 0.9423,
      "step": 19
    },
    {
      "epoch": 0.04716981132075472,
      "grad_norm": 3.9785213934452335,
      "learning_rate": 7.599999999999999e-07,
      "loss": 0.9422,
      "step": 20
    },
    {
      "epoch": 0.049528301886792456,
      "grad_norm": 3.5176355010251044,
      "learning_rate": 8e-07,
      "loss": 0.9289,
      "step": 21
    },
    {
      "epoch": 0.05188679245283019,
      "grad_norm": 5.022294330418908,
      "learning_rate": 8.399999999999999e-07,
      "loss": 0.9446,
      "step": 22
    },
    {
      "epoch": 0.054245283018867926,
      "grad_norm": 2.6661772621278637,
      "learning_rate": 8.799999999999999e-07,
      "loss": 0.9396,
      "step": 23
    },
    {
      "epoch": 0.05660377358490566,
      "grad_norm": 2.8934776201699757,
      "learning_rate": 9.2e-07,
      "loss": 0.9424,
      "step": 24
    },
    {
      "epoch": 0.0589622641509434,
      "grad_norm": 3.310951896187245,
      "learning_rate": 9.6e-07,
      "loss": 0.9317,
      "step": 25
    },
    {
      "epoch": 0.06132075471698113,
      "grad_norm": 3.180088649692653,
      "learning_rate": 1e-06,
      "loss": 0.8733,
      "step": 26
    },
    {
      "epoch": 0.06367924528301887,
      "grad_norm": 3.0819569242393032,
      "learning_rate": 1.04e-06,
      "loss": 0.9422,
      "step": 27
    },
    {
      "epoch": 0.0660377358490566,
      "grad_norm": 4.5985187060206405,
      "learning_rate": 1.08e-06,
      "loss": 0.9522,
      "step": 28
    },
    {
      "epoch": 0.06839622641509434,
      "grad_norm": 2.614037871400199,
      "learning_rate": 1.12e-06,
      "loss": 0.9168,
      "step": 29
    },
    {
      "epoch": 0.07075471698113207,
      "grad_norm": 3.4637889884655007,
      "learning_rate": 1.16e-06,
      "loss": 0.8819,
      "step": 30
    },
    {
      "epoch": 0.07311320754716981,
      "grad_norm": 2.7607684537358623,
      "learning_rate": 1.2e-06,
      "loss": 0.9443,
      "step": 31
    },
    {
      "epoch": 0.07547169811320754,
      "grad_norm": 2.45835136955183,
      "learning_rate": 1.24e-06,
      "loss": 0.8971,
      "step": 32
    },
    {
      "epoch": 0.07783018867924528,
      "grad_norm": 2.8319894880867724,
      "learning_rate": 1.28e-06,
      "loss": 0.9311,
      "step": 33
    },
    {
      "epoch": 0.08018867924528301,
      "grad_norm": 1.880755586715356,
      "learning_rate": 1.32e-06,
      "loss": 0.895,
      "step": 34
    },
    {
      "epoch": 0.08254716981132075,
      "grad_norm": 3.1062915440189713,
      "learning_rate": 1.3600000000000001e-06,
      "loss": 0.9074,
      "step": 35
    },
    {
      "epoch": 0.08490566037735849,
      "grad_norm": 2.745579181683214,
      "learning_rate": 1.4e-06,
      "loss": 0.9066,
      "step": 36
    },
    {
      "epoch": 0.08726415094339622,
      "grad_norm": 2.51965653055992,
      "learning_rate": 1.44e-06,
      "loss": 0.8737,
      "step": 37
    },
    {
      "epoch": 0.08962264150943396,
      "grad_norm": 2.324394856248302,
      "learning_rate": 1.48e-06,
      "loss": 0.9239,
      "step": 38
    },
    {
      "epoch": 0.09198113207547169,
      "grad_norm": 2.7275453116030834,
      "learning_rate": 1.5199999999999998e-06,
      "loss": 0.9029,
      "step": 39
    },
    {
      "epoch": 0.09433962264150944,
      "grad_norm": 2.8394394816071498,
      "learning_rate": 1.5599999999999999e-06,
      "loss": 0.885,
      "step": 40
    },
    {
      "epoch": 0.09669811320754718,
      "grad_norm": 2.3797648877001323,
      "learning_rate": 1.6e-06,
      "loss": 0.9084,
      "step": 41
    },
    {
      "epoch": 0.09905660377358491,
      "grad_norm": 2.298049507841523,
      "learning_rate": 1.6399999999999998e-06,
      "loss": 0.8786,
      "step": 42
    },
    {
      "epoch": 0.10141509433962265,
      "grad_norm": 2.0882957626327814,
      "learning_rate": 1.6799999999999998e-06,
      "loss": 0.885,
      "step": 43
    },
    {
      "epoch": 0.10377358490566038,
      "grad_norm": 1.9805393143682433,
      "learning_rate": 1.7199999999999998e-06,
      "loss": 0.9202,
      "step": 44
    },
    {
      "epoch": 0.10613207547169812,
      "grad_norm": 2.1136265646711605,
      "learning_rate": 1.7599999999999999e-06,
      "loss": 0.8515,
      "step": 45
    },
    {
      "epoch": 0.10849056603773585,
      "grad_norm": 1.7161578870903316,
      "learning_rate": 1.8e-06,
      "loss": 0.9035,
      "step": 46
    },
    {
      "epoch": 0.11084905660377359,
      "grad_norm": 2.2352887975077795,
      "learning_rate": 1.84e-06,
      "loss": 0.8615,
      "step": 47
    },
    {
      "epoch": 0.11320754716981132,
      "grad_norm": 1.997343145683379,
      "learning_rate": 1.8799999999999998e-06,
      "loss": 0.8958,
      "step": 48
    },
    {
      "epoch": 0.11556603773584906,
      "grad_norm": 2.1520613046888024,
      "learning_rate": 1.92e-06,
      "loss": 0.8706,
      "step": 49
    },
    {
      "epoch": 0.1179245283018868,
      "grad_norm": 1.610078026570572,
      "learning_rate": 1.96e-06,
      "loss": 0.9127,
      "step": 50
    },
    {
      "epoch": 0.12028301886792453,
      "grad_norm": 1.6267119325099246,
      "learning_rate": 2e-06,
      "loss": 0.8604,
      "step": 51
    },
    {
      "epoch": 0.12264150943396226,
      "grad_norm": 1.7305415242899538,
      "learning_rate": 1.99999225068337e-06,
      "loss": 0.8704,
      "step": 52
    },
    {
      "epoch": 0.125,
      "grad_norm": 4.837245274184259,
      "learning_rate": 1.9999690028535855e-06,
      "loss": 0.9003,
      "step": 53
    },
    {
      "epoch": 0.12735849056603774,
      "grad_norm": 2.006265922883068,
      "learning_rate": 1.9999302568709546e-06,
      "loss": 0.8759,
      "step": 54
    },
    {
      "epoch": 0.12971698113207547,
      "grad_norm": 1.8749359565723283,
      "learning_rate": 1.999876013335988e-06,
      "loss": 0.8686,
      "step": 55
    },
    {
      "epoch": 0.1320754716981132,
      "grad_norm": 1.5041642645696787,
      "learning_rate": 1.9998062730893862e-06,
      "loss": 0.8727,
      "step": 56
    },
    {
      "epoch": 0.13443396226415094,
      "grad_norm": 1.4630749706626824,
      "learning_rate": 1.9997210372120272e-06,
      "loss": 0.8718,
      "step": 57
    },
    {
      "epoch": 0.13679245283018868,
      "grad_norm": 1.4357346298179297,
      "learning_rate": 1.9996203070249514e-06,
      "loss": 0.9,
      "step": 58
    },
    {
      "epoch": 0.1391509433962264,
      "grad_norm": 1.589349788137753,
      "learning_rate": 1.9995040840893383e-06,
      "loss": 0.8832,
      "step": 59
    },
    {
      "epoch": 0.14150943396226415,
      "grad_norm": 1.3078655659788383,
      "learning_rate": 1.9993723702064853e-06,
      "loss": 0.8689,
      "step": 60
    },
    {
      "epoch": 0.14386792452830188,
      "grad_norm": 1.2162726319357984,
      "learning_rate": 1.9992251674177763e-06,
      "loss": 0.8565,
      "step": 61
    },
    {
      "epoch": 0.14622641509433962,
      "grad_norm": 1.4682400986060873,
      "learning_rate": 1.999062478004655e-06,
      "loss": 0.8768,
      "step": 62
    },
    {
      "epoch": 0.14858490566037735,
      "grad_norm": 1.475835127346073,
      "learning_rate": 1.9988843044885837e-06,
      "loss": 0.8485,
      "step": 63
    },
    {
      "epoch": 0.1509433962264151,
      "grad_norm": 1.313661603517591,
      "learning_rate": 1.998690649631009e-06,
      "loss": 0.8791,
      "step": 64
    },
    {
      "epoch": 0.15330188679245282,
      "grad_norm": 1.2318794472546806,
      "learning_rate": 1.998481516433316e-06,
      "loss": 0.8418,
      "step": 65
    },
    {
      "epoch": 0.15566037735849056,
      "grad_norm": 1.224129101101457,
      "learning_rate": 1.9982569081367843e-06,
      "loss": 0.8472,
      "step": 66
    },
    {
      "epoch": 0.1580188679245283,
      "grad_norm": 1.4354409800877264,
      "learning_rate": 1.9980168282225344e-06,
      "loss": 0.8682,
      "step": 67
    },
    {
      "epoch": 0.16037735849056603,
      "grad_norm": 1.3882277723705556,
      "learning_rate": 1.9977612804114775e-06,
      "loss": 0.8929,
      "step": 68
    },
    {
      "epoch": 0.16273584905660377,
      "grad_norm": 1.1353588967534465,
      "learning_rate": 1.9974902686642557e-06,
      "loss": 0.8123,
      "step": 69
    },
    {
      "epoch": 0.1650943396226415,
      "grad_norm": 1.1680380889234738,
      "learning_rate": 1.9972037971811797e-06,
      "loss": 0.8573,
      "step": 70
    },
    {
      "epoch": 0.16745283018867924,
      "grad_norm": 1.1797181726680495,
      "learning_rate": 1.9969018704021675e-06,
      "loss": 0.8518,
      "step": 71
    },
    {
      "epoch": 0.16981132075471697,
      "grad_norm": 1.3076313839386762,
      "learning_rate": 1.9965844930066696e-06,
      "loss": 0.8425,
      "step": 72
    },
    {
      "epoch": 0.1721698113207547,
      "grad_norm": 1.2192501506435032,
      "learning_rate": 1.9962516699136036e-06,
      "loss": 0.8633,
      "step": 73
    },
    {
      "epoch": 0.17452830188679244,
      "grad_norm": 1.1885172994924504,
      "learning_rate": 1.9959034062812714e-06,
      "loss": 0.8417,
      "step": 74
    },
    {
      "epoch": 0.17688679245283018,
      "grad_norm": 1.0228274366877697,
      "learning_rate": 1.9955397075072833e-06,
      "loss": 0.8645,
      "step": 75
    },
    {
      "epoch": 0.1792452830188679,
      "grad_norm": 1.0362433430443114,
      "learning_rate": 1.9951605792284742e-06,
      "loss": 0.8442,
      "step": 76
    },
    {
      "epoch": 0.18160377358490565,
      "grad_norm": 1.3774246732359812,
      "learning_rate": 1.9947660273208134e-06,
      "loss": 0.8355,
      "step": 77
    },
    {
      "epoch": 0.18396226415094338,
      "grad_norm": 1.3163064309206076,
      "learning_rate": 1.9943560578993165e-06,
      "loss": 0.8246,
      "step": 78
    },
    {
      "epoch": 0.18632075471698112,
      "grad_norm": 1.0822503373501942,
      "learning_rate": 1.9939306773179494e-06,
      "loss": 0.8315,
      "step": 79
    },
    {
      "epoch": 0.18867924528301888,
      "grad_norm": 1.0361871014520703,
      "learning_rate": 1.9934898921695292e-06,
      "loss": 0.8346,
      "step": 80
    },
    {
      "epoch": 0.19103773584905662,
      "grad_norm": 1.0371978245512512,
      "learning_rate": 1.993033709285624e-06,
      "loss": 0.8706,
      "step": 81
    },
    {
      "epoch": 0.19339622641509435,
      "grad_norm": 1.0574050613888015,
      "learning_rate": 1.992562135736444e-06,
      "loss": 0.8308,
      "step": 82
    },
    {
      "epoch": 0.1957547169811321,
      "grad_norm": 1.241296877885062,
      "learning_rate": 1.992075178830736e-06,
      "loss": 0.872,
      "step": 83
    },
    {
      "epoch": 0.19811320754716982,
      "grad_norm": 1.152024986745144,
      "learning_rate": 1.9915728461156654e-06,
      "loss": 0.8365,
      "step": 84
    },
    {
      "epoch": 0.20047169811320756,
      "grad_norm": 1.0972286027771576,
      "learning_rate": 1.991055145376703e-06,
      "loss": 0.8511,
      "step": 85
    },
    {
      "epoch": 0.2028301886792453,
      "grad_norm": 1.3363467615851807,
      "learning_rate": 1.990522084637503e-06,
      "loss": 0.8604,
      "step": 86
    },
    {
      "epoch": 0.20518867924528303,
      "grad_norm": 0.9249543605316475,
      "learning_rate": 1.9899736721597786e-06,
      "loss": 0.8078,
      "step": 87
    },
    {
      "epoch": 0.20754716981132076,
      "grad_norm": 1.028197337537026,
      "learning_rate": 1.9894099164431722e-06,
      "loss": 0.8572,
      "step": 88
    },
    {
      "epoch": 0.2099056603773585,
      "grad_norm": 0.9345635713942412,
      "learning_rate": 1.9888308262251284e-06,
      "loss": 0.814,
      "step": 89
    },
    {
      "epoch": 0.21226415094339623,
      "grad_norm": 1.0476770833253457,
      "learning_rate": 1.9882364104807535e-06,
      "loss": 0.8358,
      "step": 90
    },
    {
      "epoch": 0.21462264150943397,
      "grad_norm": 1.0517946592201646,
      "learning_rate": 1.9876266784226794e-06,
      "loss": 0.8263,
      "step": 91
    },
    {
      "epoch": 0.2169811320754717,
      "grad_norm": 1.1715532913432805,
      "learning_rate": 1.987001639500919e-06,
      "loss": 0.8268,
      "step": 92
    },
    {
      "epoch": 0.21933962264150944,
      "grad_norm": 0.928474382958498,
      "learning_rate": 1.9863613034027223e-06,
      "loss": 0.8278,
      "step": 93
    },
    {
      "epoch": 0.22169811320754718,
      "grad_norm": 0.9243352298229107,
      "learning_rate": 1.985705680052423e-06,
      "loss": 0.817,
      "step": 94
    },
    {
      "epoch": 0.2240566037735849,
      "grad_norm": 0.9825897380291061,
      "learning_rate": 1.985034779611287e-06,
      "loss": 0.8185,
      "step": 95
    },
    {
      "epoch": 0.22641509433962265,
      "grad_norm": 1.0129581531905947,
      "learning_rate": 1.9843486124773543e-06,
      "loss": 0.8261,
      "step": 96
    },
    {
      "epoch": 0.22877358490566038,
      "grad_norm": 0.9634164693430555,
      "learning_rate": 1.9836471892852777e-06,
      "loss": 0.8448,
      "step": 97
    },
    {
      "epoch": 0.23113207547169812,
      "grad_norm": 0.9208269074792377,
      "learning_rate": 1.982930520906158e-06,
      "loss": 0.8435,
      "step": 98
    },
    {
      "epoch": 0.23349056603773585,
      "grad_norm": 1.2744902927155426,
      "learning_rate": 1.9821986184473754e-06,
      "loss": 0.811,
      "step": 99
    },
    {
      "epoch": 0.2358490566037736,
      "grad_norm": 0.9132891996164993,
      "learning_rate": 1.9814514932524176e-06,
      "loss": 0.8629,
      "step": 100
    },
    {
      "epoch": 0.23820754716981132,
      "grad_norm": 1.3924445022644105,
      "learning_rate": 1.9806891569007048e-06,
      "loss": 0.8157,
      "step": 101
    },
    {
      "epoch": 0.24056603773584906,
      "grad_norm": 0.9910775720488755,
      "learning_rate": 1.9799116212074075e-06,
      "loss": 0.8133,
      "step": 102
    },
    {
      "epoch": 0.2429245283018868,
      "grad_norm": 0.8521457201237292,
      "learning_rate": 1.979118898223267e-06,
      "loss": 0.818,
      "step": 103
    },
    {
      "epoch": 0.24528301886792453,
      "grad_norm": 0.961507205526783,
      "learning_rate": 1.978311000234406e-06,
      "loss": 0.8312,
      "step": 104
    },
    {
      "epoch": 0.24764150943396226,
      "grad_norm": 0.9234203353918131,
      "learning_rate": 1.9774879397621383e-06,
      "loss": 0.8307,
      "step": 105
    },
    {
      "epoch": 0.25,
      "grad_norm": 0.939970736839533,
      "learning_rate": 1.9766497295627777e-06,
      "loss": 0.8121,
      "step": 106
    },
    {
      "epoch": 0.25,
      "eval_loss": 0.7522591948509216,
      "eval_runtime": 82.8066,
      "eval_samples_per_second": 8.514,
      "eval_steps_per_second": 0.543,
      "step": 106
    },
    {
      "epoch": 0.25235849056603776,
      "grad_norm": 1.009636311779919,
      "learning_rate": 1.9757963826274354e-06,
      "loss": 0.8321,
      "step": 107
    },
    {
      "epoch": 0.25471698113207547,
      "grad_norm": 0.8979616538385209,
      "learning_rate": 1.9749279121818236e-06,
      "loss": 0.8442,
      "step": 108
    },
    {
      "epoch": 0.25707547169811323,
      "grad_norm": 0.953771354540708,
      "learning_rate": 1.9740443316860463e-06,
      "loss": 0.8484,
      "step": 109
    },
    {
      "epoch": 0.25943396226415094,
      "grad_norm": 1.0957061062800166,
      "learning_rate": 1.9731456548343944e-06,
      "loss": 0.8204,
      "step": 110
    },
    {
      "epoch": 0.2617924528301887,
      "grad_norm": 0.8834656211178301,
      "learning_rate": 1.9722318955551303e-06,
      "loss": 0.7817,
      "step": 111
    },
    {
      "epoch": 0.2641509433962264,
      "grad_norm": 1.0070698705880765,
      "learning_rate": 1.9713030680102743e-06,
      "loss": 0.8309,
      "step": 112
    },
    {
      "epoch": 0.2665094339622642,
      "grad_norm": 1.0103166521916906,
      "learning_rate": 1.970359186595384e-06,
      "loss": 0.8454,
      "step": 113
    },
    {
      "epoch": 0.2688679245283019,
      "grad_norm": 0.7965830780073067,
      "learning_rate": 1.9694002659393305e-06,
      "loss": 0.7659,
      "step": 114
    },
    {
      "epoch": 0.27122641509433965,
      "grad_norm": 0.9676729525657803,
      "learning_rate": 1.968426320904074e-06,
      "loss": 0.8076,
      "step": 115
    },
    {
      "epoch": 0.27358490566037735,
      "grad_norm": 0.8547052227174852,
      "learning_rate": 1.967437366584431e-06,
      "loss": 0.8305,
      "step": 116
    },
    {
      "epoch": 0.2759433962264151,
      "grad_norm": 0.9129048151834617,
      "learning_rate": 1.9664334183078425e-06,
      "loss": 0.8443,
      "step": 117
    },
    {
      "epoch": 0.2783018867924528,
      "grad_norm": 0.8521299847115695,
      "learning_rate": 1.965414491634134e-06,
      "loss": 0.8244,
      "step": 118
    },
    {
      "epoch": 0.2806603773584906,
      "grad_norm": 0.8875033746133125,
      "learning_rate": 1.964380602355277e-06,
      "loss": 0.7855,
      "step": 119
    },
    {
      "epoch": 0.2830188679245283,
      "grad_norm": 0.8322961791869938,
      "learning_rate": 1.9633317664951417e-06,
      "loss": 0.8246,
      "step": 120
    },
    {
      "epoch": 0.28537735849056606,
      "grad_norm": 0.8996547969201056,
      "learning_rate": 1.9622680003092503e-06,
      "loss": 0.7911,
      "step": 121
    },
    {
      "epoch": 0.28773584905660377,
      "grad_norm": 0.8421734340822833,
      "learning_rate": 1.9611893202845253e-06,
      "loss": 0.8075,
      "step": 122
    },
    {
      "epoch": 0.29009433962264153,
      "grad_norm": 0.9715000278619699,
      "learning_rate": 1.9600957431390324e-06,
      "loss": 0.8258,
      "step": 123
    },
    {
      "epoch": 0.29245283018867924,
      "grad_norm": 0.9486047429972293,
      "learning_rate": 1.9589872858217233e-06,
      "loss": 0.8249,
      "step": 124
    },
    {
      "epoch": 0.294811320754717,
      "grad_norm": 0.8708681162275084,
      "learning_rate": 1.9578639655121707e-06,
      "loss": 0.8061,
      "step": 125
    },
    {
      "epoch": 0.2971698113207547,
      "grad_norm": 0.815600627546263,
      "learning_rate": 1.9567257996203046e-06,
      "loss": 0.8117,
      "step": 126
    },
    {
      "epoch": 0.29952830188679247,
      "grad_norm": 0.899644949674199,
      "learning_rate": 1.955572805786141e-06,
      "loss": 0.8324,
      "step": 127
    },
    {
      "epoch": 0.3018867924528302,
      "grad_norm": 0.7797228121513826,
      "learning_rate": 1.9544050018795075e-06,
      "loss": 0.8085,
      "step": 128
    },
    {
      "epoch": 0.30424528301886794,
      "grad_norm": 1.4425571525030387,
      "learning_rate": 1.953222405999769e-06,
      "loss": 0.8043,
      "step": 129
    },
    {
      "epoch": 0.30660377358490565,
      "grad_norm": 1.3224705998967385,
      "learning_rate": 1.9520250364755458e-06,
      "loss": 0.8104,
      "step": 130
    },
    {
      "epoch": 0.3089622641509434,
      "grad_norm": 0.8148761244743041,
      "learning_rate": 1.9508129118644293e-06,
      "loss": 0.828,
      "step": 131
    },
    {
      "epoch": 0.3113207547169811,
      "grad_norm": 1.0994942919129091,
      "learning_rate": 1.949586050952693e-06,
      "loss": 0.8007,
      "step": 132
    },
    {
      "epoch": 0.3136792452830189,
      "grad_norm": 0.9838294161632027,
      "learning_rate": 1.9483444727550054e-06,
      "loss": 0.8304,
      "step": 133
    },
    {
      "epoch": 0.3160377358490566,
      "grad_norm": 1.0206044534349468,
      "learning_rate": 1.9470881965141307e-06,
      "loss": 0.8044,
      "step": 134
    },
    {
      "epoch": 0.31839622641509435,
      "grad_norm": 0.8186471659261382,
      "learning_rate": 1.9458172417006346e-06,
      "loss": 0.7921,
      "step": 135
    },
    {
      "epoch": 0.32075471698113206,
      "grad_norm": 1.040687215576322,
      "learning_rate": 1.944531628012579e-06,
      "loss": 0.8309,
      "step": 136
    },
    {
      "epoch": 0.3231132075471698,
      "grad_norm": 0.8067576806018053,
      "learning_rate": 1.9432313753752194e-06,
      "loss": 0.8006,
      "step": 137
    },
    {
      "epoch": 0.32547169811320753,
      "grad_norm": 1.1895556658600497,
      "learning_rate": 1.941916503940694e-06,
      "loss": 0.8055,
      "step": 138
    },
    {
      "epoch": 0.3278301886792453,
      "grad_norm": 0.877104803725933,
      "learning_rate": 1.9405870340877135e-06,
      "loss": 0.815,
      "step": 139
    },
    {
      "epoch": 0.330188679245283,
      "grad_norm": 1.0755873765351904,
      "learning_rate": 1.9392429864212433e-06,
      "loss": 0.8357,
      "step": 140
    },
    {
      "epoch": 0.33254716981132076,
      "grad_norm": 0.8389051921712227,
      "learning_rate": 1.9378843817721854e-06,
      "loss": 0.8054,
      "step": 141
    },
    {
      "epoch": 0.33490566037735847,
      "grad_norm": 0.8492954330518299,
      "learning_rate": 1.9365112411970546e-06,
      "loss": 0.8125,
      "step": 142
    },
    {
      "epoch": 0.33726415094339623,
      "grad_norm": 0.8117108742521268,
      "learning_rate": 1.9351235859776537e-06,
      "loss": 0.7996,
      "step": 143
    },
    {
      "epoch": 0.33962264150943394,
      "grad_norm": 0.8734747925195555,
      "learning_rate": 1.9337214376207417e-06,
      "loss": 0.7987,
      "step": 144
    },
    {
      "epoch": 0.3419811320754717,
      "grad_norm": 0.870843122026526,
      "learning_rate": 1.932304817857702e-06,
      "loss": 0.8071,
      "step": 145
    },
    {
      "epoch": 0.3443396226415094,
      "grad_norm": 0.7823387766729867,
      "learning_rate": 1.930873748644204e-06,
      "loss": 0.781,
      "step": 146
    },
    {
      "epoch": 0.3466981132075472,
      "grad_norm": 1.0441516146481582,
      "learning_rate": 1.9294282521598657e-06,
      "loss": 0.8211,
      "step": 147
    },
    {
      "epoch": 0.3490566037735849,
      "grad_norm": 0.8689106447919821,
      "learning_rate": 1.927968350807906e-06,
      "loss": 0.7827,
      "step": 148
    },
    {
      "epoch": 0.35141509433962265,
      "grad_norm": 0.8369054883974562,
      "learning_rate": 1.9264940672148015e-06,
      "loss": 0.7987,
      "step": 149
    },
    {
      "epoch": 0.35377358490566035,
      "grad_norm": 1.628472390625535,
      "learning_rate": 1.9250054242299326e-06,
      "loss": 0.8245,
      "step": 150
    },
    {
      "epoch": 0.3561320754716981,
      "grad_norm": 0.8210347316488275,
      "learning_rate": 1.9235024449252305e-06,
      "loss": 0.8026,
      "step": 151
    },
    {
      "epoch": 0.3584905660377358,
      "grad_norm": 0.8976272281891767,
      "learning_rate": 1.9219851525948203e-06,
      "loss": 0.8343,
      "step": 152
    },
    {
      "epoch": 0.3608490566037736,
      "grad_norm": 0.8688278315807118,
      "learning_rate": 1.92045357075466e-06,
      "loss": 0.8378,
      "step": 153
    },
    {
      "epoch": 0.3632075471698113,
      "grad_norm": 0.8259924861279039,
      "learning_rate": 1.9189077231421746e-06,
      "loss": 0.771,
      "step": 154
    },
    {
      "epoch": 0.36556603773584906,
      "grad_norm": 0.850617328037459,
      "learning_rate": 1.917347633715889e-06,
      "loss": 0.8116,
      "step": 155
    },
    {
      "epoch": 0.36792452830188677,
      "grad_norm": 0.830159427983018,
      "learning_rate": 1.915773326655057e-06,
      "loss": 0.8142,
      "step": 156
    },
    {
      "epoch": 0.37028301886792453,
      "grad_norm": 0.9047033822843099,
      "learning_rate": 1.9141848263592873e-06,
      "loss": 0.7867,
      "step": 157
    },
    {
      "epoch": 0.37264150943396224,
      "grad_norm": 0.8043279871245154,
      "learning_rate": 1.9125821574481623e-06,
      "loss": 0.7821,
      "step": 158
    },
    {
      "epoch": 0.375,
      "grad_norm": 0.9855797219219302,
      "learning_rate": 1.9109653447608605e-06,
      "loss": 0.838,
      "step": 159
    },
    {
      "epoch": 0.37735849056603776,
      "grad_norm": 0.8671907333946726,
      "learning_rate": 1.909334413355768e-06,
      "loss": 0.8199,
      "step": 160
    },
    {
      "epoch": 0.37971698113207547,
      "grad_norm": 0.8038519157610383,
      "learning_rate": 1.9076893885100934e-06,
      "loss": 0.7914,
      "step": 161
    },
    {
      "epoch": 0.38207547169811323,
      "grad_norm": 0.8534138715990678,
      "learning_rate": 1.906030295719473e-06,
      "loss": 0.803,
      "step": 162
    },
    {
      "epoch": 0.38443396226415094,
      "grad_norm": 1.272101513607124,
      "learning_rate": 1.9043571606975775e-06,
      "loss": 0.8243,
      "step": 163
    },
    {
      "epoch": 0.3867924528301887,
      "grad_norm": 0.8724688206578753,
      "learning_rate": 1.9026700093757129e-06,
      "loss": 0.8,
      "step": 164
    },
    {
      "epoch": 0.3891509433962264,
      "grad_norm": 0.8272714642025372,
      "learning_rate": 1.9009688679024189e-06,
      "loss": 0.7941,
      "step": 165
    },
    {
      "epoch": 0.3915094339622642,
      "grad_norm": 1.609360832798017,
      "learning_rate": 1.8992537626430636e-06,
      "loss": 0.8184,
      "step": 166
    },
    {
      "epoch": 0.3938679245283019,
      "grad_norm": 1.068913128886596,
      "learning_rate": 1.897524720179434e-06,
      "loss": 0.7551,
      "step": 167
    },
    {
      "epoch": 0.39622641509433965,
      "grad_norm": 0.876062488501413,
      "learning_rate": 1.8957817673093256e-06,
      "loss": 0.8202,
      "step": 168
    },
    {
      "epoch": 0.39858490566037735,
      "grad_norm": 0.7804659272450435,
      "learning_rate": 1.894024931046125e-06,
      "loss": 0.8096,
      "step": 169
    },
    {
      "epoch": 0.4009433962264151,
      "grad_norm": 1.1020958270083518,
      "learning_rate": 1.8922542386183939e-06,
      "loss": 0.8023,
      "step": 170
    },
    {
      "epoch": 0.4033018867924528,
      "grad_norm": 0.8307091625451186,
      "learning_rate": 1.8904697174694446e-06,
      "loss": 0.797,
      "step": 171
    },
    {
      "epoch": 0.4056603773584906,
      "grad_norm": 1.4660516537985,
      "learning_rate": 1.8886713952569156e-06,
      "loss": 0.8001,
      "step": 172
    },
    {
      "epoch": 0.4080188679245283,
      "grad_norm": 0.9042037911584216,
      "learning_rate": 1.8868592998523436e-06,
      "loss": 0.7798,
      "step": 173
    },
    {
      "epoch": 0.41037735849056606,
      "grad_norm": 0.8127436933722597,
      "learning_rate": 1.885033459340731e-06,
      "loss": 0.791,
      "step": 174
    },
    {
      "epoch": 0.41273584905660377,
      "grad_norm": 0.8906980453056944,
      "learning_rate": 1.8831939020201096e-06,
      "loss": 0.8117,
      "step": 175
    },
    {
      "epoch": 0.41509433962264153,
      "grad_norm": 0.7811131450862968,
      "learning_rate": 1.8813406564011044e-06,
      "loss": 0.742,
      "step": 176
    },
    {
      "epoch": 0.41745283018867924,
      "grad_norm": 2.0285717851138974,
      "learning_rate": 1.8794737512064888e-06,
      "loss": 0.803,
      "step": 177
    },
    {
      "epoch": 0.419811320754717,
      "grad_norm": 0.9681303046265977,
      "learning_rate": 1.8775932153707426e-06,
      "loss": 0.7857,
      "step": 178
    },
    {
      "epoch": 0.4221698113207547,
      "grad_norm": 0.8312813398366425,
      "learning_rate": 1.8756990780396006e-06,
      "loss": 0.8091,
      "step": 179
    },
    {
      "epoch": 0.42452830188679247,
      "grad_norm": 0.8325963532837448,
      "learning_rate": 1.8737913685696027e-06,
      "loss": 0.7936,
      "step": 180
    },
    {
      "epoch": 0.4268867924528302,
      "grad_norm": 1.0242582555350113,
      "learning_rate": 1.8718701165276383e-06,
      "loss": 0.7679,
      "step": 181
    },
    {
      "epoch": 0.42924528301886794,
      "grad_norm": 0.8568509432407337,
      "learning_rate": 1.869935351690488e-06,
      "loss": 0.7813,
      "step": 182
    },
    {
      "epoch": 0.43160377358490565,
      "grad_norm": 1.1130973261677584,
      "learning_rate": 1.867987104044363e-06,
      "loss": 0.8173,
      "step": 183
    },
    {
      "epoch": 0.4339622641509434,
      "grad_norm": 0.8076547906696275,
      "learning_rate": 1.8660254037844386e-06,
      "loss": 0.7823,
      "step": 184
    },
    {
      "epoch": 0.4363207547169811,
      "grad_norm": 0.8462497637993398,
      "learning_rate": 1.864050281314388e-06,
      "loss": 0.7964,
      "step": 185
    },
    {
      "epoch": 0.4386792452830189,
      "grad_norm": 0.8157104951412149,
      "learning_rate": 1.8620617672459096e-06,
      "loss": 0.7851,
      "step": 186
    },
    {
      "epoch": 0.4410377358490566,
      "grad_norm": 0.8016398797843809,
      "learning_rate": 1.8600598923982537e-06,
      "loss": 0.7939,
      "step": 187
    },
    {
      "epoch": 0.44339622641509435,
      "grad_norm": 0.9872324739847712,
      "learning_rate": 1.858044687797745e-06,
      "loss": 0.8105,
      "step": 188
    },
    {
      "epoch": 0.44575471698113206,
      "grad_norm": 0.8070225439686687,
      "learning_rate": 1.8560161846773e-06,
      "loss": 0.7972,
      "step": 189
    },
    {
      "epoch": 0.4481132075471698,
      "grad_norm": 0.8159111285487299,
      "learning_rate": 1.8539744144759447e-06,
      "loss": 0.7956,
      "step": 190
    },
    {
      "epoch": 0.45047169811320753,
      "grad_norm": 0.8271007147512706,
      "learning_rate": 1.851919408838327e-06,
      "loss": 0.801,
      "step": 191
    },
    {
      "epoch": 0.4528301886792453,
      "grad_norm": 0.8240075084440164,
      "learning_rate": 1.8498511996142253e-06,
      "loss": 0.8131,
      "step": 192
    },
    {
      "epoch": 0.455188679245283,
      "grad_norm": 0.8100827961239685,
      "learning_rate": 1.8477698188580557e-06,
      "loss": 0.7953,
      "step": 193
    },
    {
      "epoch": 0.45754716981132076,
      "grad_norm": 0.8090963621477647,
      "learning_rate": 1.8456752988283757e-06,
      "loss": 0.7713,
      "step": 194
    },
    {
      "epoch": 0.45990566037735847,
      "grad_norm": 0.8646115465094427,
      "learning_rate": 1.8435676719873827e-06,
      "loss": 0.8218,
      "step": 195
    },
    {
      "epoch": 0.46226415094339623,
      "grad_norm": 0.8283897647434588,
      "learning_rate": 1.8414469710004124e-06,
      "loss": 0.7937,
      "step": 196
    },
    {
      "epoch": 0.46462264150943394,
      "grad_norm": 0.8130051258008277,
      "learning_rate": 1.839313228735431e-06,
      "loss": 0.7822,
      "step": 197
    },
    {
      "epoch": 0.4669811320754717,
      "grad_norm": 0.8754673959382613,
      "learning_rate": 1.8371664782625285e-06,
      "loss": 0.7903,
      "step": 198
    },
    {
      "epoch": 0.4693396226415094,
      "grad_norm": 1.287939397478769,
      "learning_rate": 1.8350067528534024e-06,
      "loss": 0.7718,
      "step": 199
    },
    {
      "epoch": 0.4716981132075472,
      "grad_norm": 0.8236848990785427,
      "learning_rate": 1.8328340859808446e-06,
      "loss": 0.8012,
      "step": 200
    },
    {
      "epoch": 0.4740566037735849,
      "grad_norm": 0.8938042537260998,
      "learning_rate": 1.8306485113182229e-06,
      "loss": 0.7721,
      "step": 201
    },
    {
      "epoch": 0.47641509433962265,
      "grad_norm": 0.8496295751844541,
      "learning_rate": 1.8284500627389567e-06,
      "loss": 0.8043,
      "step": 202
    },
    {
      "epoch": 0.47877358490566035,
      "grad_norm": 0.8325387527055562,
      "learning_rate": 1.8262387743159948e-06,
      "loss": 0.7936,
      "step": 203
    },
    {
      "epoch": 0.4811320754716981,
      "grad_norm": 0.8207579274147835,
      "learning_rate": 1.824014680321285e-06,
      "loss": 0.8153,
      "step": 204
    },
    {
      "epoch": 0.4834905660377358,
      "grad_norm": 0.9100175298073298,
      "learning_rate": 1.821777815225245e-06,
      "loss": 0.777,
      "step": 205
    },
    {
      "epoch": 0.4858490566037736,
      "grad_norm": 0.9563101605978098,
      "learning_rate": 1.8195282136962264e-06,
      "loss": 0.7991,
      "step": 206
    },
    {
      "epoch": 0.4882075471698113,
      "grad_norm": 0.8404681193002833,
      "learning_rate": 1.817265910599978e-06,
      "loss": 0.7968,
      "step": 207
    },
    {
      "epoch": 0.49056603773584906,
      "grad_norm": 0.898412624742003,
      "learning_rate": 1.814990940999106e-06,
      "loss": 0.7585,
      "step": 208
    },
    {
      "epoch": 0.49292452830188677,
      "grad_norm": 0.8495327539856999,
      "learning_rate": 1.81270334015253e-06,
      "loss": 0.8133,
      "step": 209
    },
    {
      "epoch": 0.49528301886792453,
      "grad_norm": 1.1225882373174563,
      "learning_rate": 1.8104031435149362e-06,
      "loss": 0.8121,
      "step": 210
    },
    {
      "epoch": 0.49764150943396224,
      "grad_norm": 0.9329252376646318,
      "learning_rate": 1.8080903867362293e-06,
      "loss": 0.815,
      "step": 211
    },
    {
      "epoch": 0.5,
      "grad_norm": 1.0591364512578068,
      "learning_rate": 1.8057651056609782e-06,
      "loss": 0.7649,
      "step": 212
    },
    {
      "epoch": 0.5,
      "eval_loss": 0.714023768901825,
      "eval_runtime": 83.0267,
      "eval_samples_per_second": 8.491,
      "eval_steps_per_second": 0.542,
      "step": 212
    },
    {
      "epoch": 0.5023584905660378,
      "grad_norm": 0.8614390398527076,
      "learning_rate": 1.803427336327861e-06,
      "loss": 0.8039,
      "step": 213
    },
    {
      "epoch": 0.5047169811320755,
      "grad_norm": 0.903956531352285,
      "learning_rate": 1.8010771149691082e-06,
      "loss": 0.784,
      "step": 214
    },
    {
      "epoch": 0.5070754716981132,
      "grad_norm": 0.8574588324038701,
      "learning_rate": 1.7987144780099376e-06,
      "loss": 0.7956,
      "step": 215
    },
    {
      "epoch": 0.5094339622641509,
      "grad_norm": 0.8976197887421247,
      "learning_rate": 1.7963394620679942e-06,
      "loss": 0.8261,
      "step": 216
    },
    {
      "epoch": 0.5117924528301887,
      "grad_norm": 0.8410804453164143,
      "learning_rate": 1.7939521039527781e-06,
      "loss": 0.7816,
      "step": 217
    },
    {
      "epoch": 0.5141509433962265,
      "grad_norm": 0.903488950014599,
      "learning_rate": 1.7915524406650775e-06,
      "loss": 0.7701,
      "step": 218
    },
    {
      "epoch": 0.5165094339622641,
      "grad_norm": 0.8374711197328489,
      "learning_rate": 1.7891405093963937e-06,
      "loss": 0.7761,
      "step": 219
    },
    {
      "epoch": 0.5188679245283019,
      "grad_norm": 0.938187408138231,
      "learning_rate": 1.7867163475283646e-06,
      "loss": 0.8247,
      "step": 220
    },
    {
      "epoch": 0.5212264150943396,
      "grad_norm": 1.974177068847498,
      "learning_rate": 1.7842799926321863e-06,
      "loss": 0.7984,
      "step": 221
    },
    {
      "epoch": 0.5235849056603774,
      "grad_norm": 0.8445314748349818,
      "learning_rate": 1.7818314824680298e-06,
      "loss": 0.7783,
      "step": 222
    },
    {
      "epoch": 0.5259433962264151,
      "grad_norm": 0.8043844477824647,
      "learning_rate": 1.779370854984456e-06,
      "loss": 0.7748,
      "step": 223
    },
    {
      "epoch": 0.5283018867924528,
      "grad_norm": 0.8199345888971309,
      "learning_rate": 1.7768981483178279e-06,
      "loss": 0.7961,
      "step": 224
    },
    {
      "epoch": 0.5306603773584906,
      "grad_norm": 0.8198791332861125,
      "learning_rate": 1.7744134007917194e-06,
      "loss": 0.7809,
      "step": 225
    },
    {
      "epoch": 0.5330188679245284,
      "grad_norm": 0.8038089067621491,
      "learning_rate": 1.7719166509163208e-06,
      "loss": 0.752,
      "step": 226
    },
    {
      "epoch": 0.535377358490566,
      "grad_norm": 0.8115733374512414,
      "learning_rate": 1.7694079373878433e-06,
      "loss": 0.7977,
      "step": 227
    },
    {
      "epoch": 0.5377358490566038,
      "grad_norm": 0.8325145007355951,
      "learning_rate": 1.7668872990879173e-06,
      "loss": 0.775,
      "step": 228
    },
    {
      "epoch": 0.5400943396226415,
      "grad_norm": 0.8029142353641874,
      "learning_rate": 1.7643547750829918e-06,
      "loss": 0.7897,
      "step": 229
    },
    {
      "epoch": 0.5424528301886793,
      "grad_norm": 0.8068710232788708,
      "learning_rate": 1.7618104046237274e-06,
      "loss": 0.7869,
      "step": 230
    },
    {
      "epoch": 0.5448113207547169,
      "grad_norm": 0.8179053896349096,
      "learning_rate": 1.7592542271443887e-06,
      "loss": 0.7737,
      "step": 231
    },
    {
      "epoch": 0.5471698113207547,
      "grad_norm": 0.9236859332969564,
      "learning_rate": 1.7566862822622328e-06,
      "loss": 0.7779,
      "step": 232
    },
    {
      "epoch": 0.5495283018867925,
      "grad_norm": 0.8105651817226325,
      "learning_rate": 1.754106609776896e-06,
      "loss": 0.7841,
      "step": 233
    },
    {
      "epoch": 0.5518867924528302,
      "grad_norm": 0.8955884533729873,
      "learning_rate": 1.7515152496697763e-06,
      "loss": 0.8077,
      "step": 234
    },
    {
      "epoch": 0.5542452830188679,
      "grad_norm": 1.015222493683735,
      "learning_rate": 1.748912242103413e-06,
      "loss": 0.8013,
      "step": 235
    },
    {
      "epoch": 0.5566037735849056,
      "grad_norm": 0.9309116956021466,
      "learning_rate": 1.746297627420866e-06,
      "loss": 0.7672,
      "step": 236
    },
    {
      "epoch": 0.5589622641509434,
      "grad_norm": 0.9599117437990355,
      "learning_rate": 1.7436714461450897e-06,
      "loss": 0.7957,
      "step": 237
    },
    {
      "epoch": 0.5613207547169812,
      "grad_norm": 0.8340801449927656,
      "learning_rate": 1.7410337389783041e-06,
      "loss": 0.7994,
      "step": 238
    },
    {
      "epoch": 0.5636792452830188,
      "grad_norm": 0.788362523807564,
      "learning_rate": 1.7383845468013654e-06,
      "loss": 0.7707,
      "step": 239
    },
    {
      "epoch": 0.5660377358490566,
      "grad_norm": 0.8572171206767668,
      "learning_rate": 1.7357239106731317e-06,
      "loss": 0.7931,
      "step": 240
    },
    {
      "epoch": 0.5683962264150944,
      "grad_norm": 4.180247592376519,
      "learning_rate": 1.733051871829826e-06,
      "loss": 0.798,
      "step": 241
    },
    {
      "epoch": 0.5707547169811321,
      "grad_norm": 0.8738642652249169,
      "learning_rate": 1.7303684716843995e-06,
      "loss": 0.7768,
      "step": 242
    },
    {
      "epoch": 0.5731132075471698,
      "grad_norm": 0.8413513234438594,
      "learning_rate": 1.727673751825886e-06,
      "loss": 0.8139,
      "step": 243
    },
    {
      "epoch": 0.5754716981132075,
      "grad_norm": 0.8032830413975406,
      "learning_rate": 1.7249677540187609e-06,
      "loss": 0.7877,
      "step": 244
    },
    {
      "epoch": 0.5778301886792453,
      "grad_norm": 0.7992641395875911,
      "learning_rate": 1.7222505202022913e-06,
      "loss": 0.7712,
      "step": 245
    },
    {
      "epoch": 0.5801886792452831,
      "grad_norm": 0.8865998909749021,
      "learning_rate": 1.7195220924898882e-06,
      "loss": 0.7584,
      "step": 246
    },
    {
      "epoch": 0.5825471698113207,
      "grad_norm": 0.8256395052056026,
      "learning_rate": 1.7167825131684511e-06,
      "loss": 0.7905,
      "step": 247
    },
    {
      "epoch": 0.5849056603773585,
      "grad_norm": 0.8134932247816357,
      "learning_rate": 1.7140318246977163e-06,
      "loss": 0.7932,
      "step": 248
    },
    {
      "epoch": 0.5872641509433962,
      "grad_norm": 0.8289362753913454,
      "learning_rate": 1.7112700697095953e-06,
      "loss": 0.7727,
      "step": 249
    },
    {
      "epoch": 0.589622641509434,
      "grad_norm": 0.7919440339387878,
      "learning_rate": 1.7084972910075154e-06,
      "loss": 0.819,
      "step": 250
    },
    {
      "epoch": 0.5919811320754716,
      "grad_norm": 0.7892796730141778,
      "learning_rate": 1.7057135315657567e-06,
      "loss": 0.7994,
      "step": 251
    },
    {
      "epoch": 0.5943396226415094,
      "grad_norm": 0.818593844058535,
      "learning_rate": 1.7029188345287865e-06,
      "loss": 0.7833,
      "step": 252
    },
    {
      "epoch": 0.5966981132075472,
      "grad_norm": 0.8550235074282425,
      "learning_rate": 1.7001132432105894e-06,
      "loss": 0.7627,
      "step": 253
    },
    {
      "epoch": 0.5990566037735849,
      "grad_norm": 0.7825131867584777,
      "learning_rate": 1.6972968010939952e-06,
      "loss": 0.7863,
      "step": 254
    },
    {
      "epoch": 0.6014150943396226,
      "grad_norm": 0.7872637454110324,
      "learning_rate": 1.6944695518300084e-06,
      "loss": 0.8202,
      "step": 255
    },
    {
      "epoch": 0.6037735849056604,
      "grad_norm": 0.8264865629926671,
      "learning_rate": 1.6916315392371283e-06,
      "loss": 0.7794,
      "step": 256
    },
    {
      "epoch": 0.6061320754716981,
      "grad_norm": 0.8112171041686048,
      "learning_rate": 1.688782807300671e-06,
      "loss": 0.7975,
      "step": 257
    },
    {
      "epoch": 0.6084905660377359,
      "grad_norm": 0.8017599214256371,
      "learning_rate": 1.685923400172088e-06,
      "loss": 0.7821,
      "step": 258
    },
    {
      "epoch": 0.6108490566037735,
      "grad_norm": 0.7855915924335556,
      "learning_rate": 1.683053362168282e-06,
      "loss": 0.7834,
      "step": 259
    },
    {
      "epoch": 0.6132075471698113,
      "grad_norm": 0.8227864852025443,
      "learning_rate": 1.6801727377709191e-06,
      "loss": 0.7715,
      "step": 260
    },
    {
      "epoch": 0.6155660377358491,
      "grad_norm": 0.8347676603852344,
      "learning_rate": 1.6772815716257411e-06,
      "loss": 0.7783,
      "step": 261
    },
    {
      "epoch": 0.6179245283018868,
      "grad_norm": 0.847840576553053,
      "learning_rate": 1.6743799085418709e-06,
      "loss": 0.7796,
      "step": 262
    },
    {
      "epoch": 0.6202830188679245,
      "grad_norm": 0.8187554443504805,
      "learning_rate": 1.671467793491121e-06,
      "loss": 0.8223,
      "step": 263
    },
    {
      "epoch": 0.6226415094339622,
      "grad_norm": 0.8560599665847224,
      "learning_rate": 1.6685452716072942e-06,
      "loss": 0.7769,
      "step": 264
    },
    {
      "epoch": 0.625,
      "grad_norm": 0.8756516850796994,
      "learning_rate": 1.6656123881854858e-06,
      "loss": 0.782,
      "step": 265
    },
    {
      "epoch": 0.6273584905660378,
      "grad_norm": 0.861305723586375,
      "learning_rate": 1.6626691886813802e-06,
      "loss": 0.7907,
      "step": 266
    },
    {
      "epoch": 0.6297169811320755,
      "grad_norm": 0.8565858633218812,
      "learning_rate": 1.6597157187105474e-06,
      "loss": 0.7586,
      "step": 267
    },
    {
      "epoch": 0.6320754716981132,
      "grad_norm": 0.7796005038585514,
      "learning_rate": 1.6567520240477343e-06,
      "loss": 0.7996,
      "step": 268
    },
    {
      "epoch": 0.6344339622641509,
      "grad_norm": 0.9853760454692906,
      "learning_rate": 1.6537781506261586e-06,
      "loss": 0.803,
      "step": 269
    },
    {
      "epoch": 0.6367924528301887,
      "grad_norm": 0.8214336899114236,
      "learning_rate": 1.6507941445367934e-06,
      "loss": 0.7484,
      "step": 270
    },
    {
      "epoch": 0.6391509433962265,
      "grad_norm": 0.8871384572382998,
      "learning_rate": 1.647800052027655e-06,
      "loss": 0.7876,
      "step": 271
    },
    {
      "epoch": 0.6415094339622641,
      "grad_norm": 0.8564572450153076,
      "learning_rate": 1.6447959195030849e-06,
      "loss": 0.8054,
      "step": 272
    },
    {
      "epoch": 0.6438679245283019,
      "grad_norm": 0.8408989291477506,
      "learning_rate": 1.6417817935230316e-06,
      "loss": 0.7668,
      "step": 273
    },
    {
      "epoch": 0.6462264150943396,
      "grad_norm": 0.7945270947755096,
      "learning_rate": 1.6387577208023279e-06,
      "loss": 0.7893,
      "step": 274
    },
    {
      "epoch": 0.6485849056603774,
      "grad_norm": 0.8844404937902336,
      "learning_rate": 1.6357237482099683e-06,
      "loss": 0.794,
      "step": 275
    },
    {
      "epoch": 0.6509433962264151,
      "grad_norm": 0.8518070202862351,
      "learning_rate": 1.6326799227683803e-06,
      "loss": 0.7967,
      "step": 276
    },
    {
      "epoch": 0.6533018867924528,
      "grad_norm": 0.819470621959587,
      "learning_rate": 1.6296262916526995e-06,
      "loss": 0.7633,
      "step": 277
    },
    {
      "epoch": 0.6556603773584906,
      "grad_norm": 0.7989369195204531,
      "learning_rate": 1.626562902190034e-06,
      "loss": 0.807,
      "step": 278
    },
    {
      "epoch": 0.6580188679245284,
      "grad_norm": 0.9412075176216419,
      "learning_rate": 1.6234898018587336e-06,
      "loss": 0.7752,
      "step": 279
    },
    {
      "epoch": 0.660377358490566,
      "grad_norm": 0.7895838126966979,
      "learning_rate": 1.6204070382876531e-06,
      "loss": 0.7786,
      "step": 280
    },
    {
      "epoch": 0.6627358490566038,
      "grad_norm": 0.7981016976096619,
      "learning_rate": 1.6173146592554152e-06,
      "loss": 0.7958,
      "step": 281
    },
    {
      "epoch": 0.6650943396226415,
      "grad_norm": 0.8907323576889667,
      "learning_rate": 1.6142127126896679e-06,
      "loss": 0.807,
      "step": 282
    },
    {
      "epoch": 0.6674528301886793,
      "grad_norm": 0.8053567442309949,
      "learning_rate": 1.6111012466663434e-06,
      "loss": 0.788,
      "step": 283
    },
    {
      "epoch": 0.6698113207547169,
      "grad_norm": 0.8378739800286575,
      "learning_rate": 1.6079803094089125e-06,
      "loss": 0.736,
      "step": 284
    },
    {
      "epoch": 0.6721698113207547,
      "grad_norm": 0.8858100985238717,
      "learning_rate": 1.6048499492876375e-06,
      "loss": 0.8106,
      "step": 285
    },
    {
      "epoch": 0.6745283018867925,
      "grad_norm": 0.8276972316955101,
      "learning_rate": 1.6017102148188215e-06,
      "loss": 0.7835,
      "step": 286
    },
    {
      "epoch": 0.6768867924528302,
      "grad_norm": 0.8853131510592382,
      "learning_rate": 1.598561154664058e-06,
      "loss": 0.7965,
      "step": 287
    },
    {
      "epoch": 0.6792452830188679,
      "grad_norm": 0.8786321199638693,
      "learning_rate": 1.5954028176294746e-06,
      "loss": 0.8268,
      "step": 288
    },
    {
      "epoch": 0.6816037735849056,
      "grad_norm": 0.8669272571254311,
      "learning_rate": 1.5922352526649801e-06,
      "loss": 0.7823,
      "step": 289
    },
    {
      "epoch": 0.6839622641509434,
      "grad_norm": 0.9644127048111335,
      "learning_rate": 1.589058508863501e-06,
      "loss": 0.78,
      "step": 290
    },
    {
      "epoch": 0.6863207547169812,
      "grad_norm": 0.8073310279373501,
      "learning_rate": 1.5858726354602248e-06,
      "loss": 0.7765,
      "step": 291
    },
    {
      "epoch": 0.6886792452830188,
      "grad_norm": 0.8775721979922628,
      "learning_rate": 1.5826776818318348e-06,
      "loss": 0.7597,
      "step": 292
    },
    {
      "epoch": 0.6910377358490566,
      "grad_norm": 1.015547726125361,
      "learning_rate": 1.5794736974957464e-06,
      "loss": 0.786,
      "step": 293
    },
    {
      "epoch": 0.6933962264150944,
      "grad_norm": 0.7708601481727361,
      "learning_rate": 1.5762607321093366e-06,
      "loss": 0.7817,
      "step": 294
    },
    {
      "epoch": 0.6957547169811321,
      "grad_norm": 0.8426421786890617,
      "learning_rate": 1.5730388354691785e-06,
      "loss": 0.7847,
      "step": 295
    },
    {
      "epoch": 0.6981132075471698,
      "grad_norm": 0.7855014020345946,
      "learning_rate": 1.569808057510266e-06,
      "loss": 0.7748,
      "step": 296
    },
    {
      "epoch": 0.7004716981132075,
      "grad_norm": 0.8389890263875862,
      "learning_rate": 1.5665684483052424e-06,
      "loss": 0.7856,
      "step": 297
    },
    {
      "epoch": 0.7028301886792453,
      "grad_norm": 0.8482309028462507,
      "learning_rate": 1.563320058063622e-06,
      "loss": 0.7469,
      "step": 298
    },
    {
      "epoch": 0.7051886792452831,
      "grad_norm": 1.0888639918368348,
      "learning_rate": 1.5600629371310144e-06,
      "loss": 0.7905,
      "step": 299
    },
    {
      "epoch": 0.7075471698113207,
      "grad_norm": 0.8011947485403833,
      "learning_rate": 1.556797135988342e-06,
      "loss": 0.7902,
      "step": 300
    },
    {
      "epoch": 0.7099056603773585,
      "grad_norm": 0.7584432625187374,
      "learning_rate": 1.5535227052510588e-06,
      "loss": 0.7935,
      "step": 301
    },
    {
      "epoch": 0.7122641509433962,
      "grad_norm": 1.1353449141437797,
      "learning_rate": 1.5502396956683667e-06,
      "loss": 0.7992,
      "step": 302
    },
    {
      "epoch": 0.714622641509434,
      "grad_norm": 0.8144382945981636,
      "learning_rate": 1.5469481581224271e-06,
      "loss": 0.761,
      "step": 303
    },
    {
      "epoch": 0.7169811320754716,
      "grad_norm": 0.9448262210280223,
      "learning_rate": 1.5436481436275724e-06,
      "loss": 0.7839,
      "step": 304
    },
    {
      "epoch": 0.7193396226415094,
      "grad_norm": 0.8145609006654517,
      "learning_rate": 1.5403397033295179e-06,
      "loss": 0.7975,
      "step": 305
    },
    {
      "epoch": 0.7216981132075472,
      "grad_norm": 0.9825256520905712,
      "learning_rate": 1.537022888504566e-06,
      "loss": 0.7918,
      "step": 306
    },
    {
      "epoch": 0.7240566037735849,
      "grad_norm": 0.9647183394075436,
      "learning_rate": 1.5336977505588134e-06,
      "loss": 0.7973,
      "step": 307
    },
    {
      "epoch": 0.7264150943396226,
      "grad_norm": 0.802175509091533,
      "learning_rate": 1.530364341027354e-06,
      "loss": 0.7928,
      "step": 308
    },
    {
      "epoch": 0.7287735849056604,
      "grad_norm": 0.8122620856594104,
      "learning_rate": 1.5270227115734789e-06,
      "loss": 0.7767,
      "step": 309
    },
    {
      "epoch": 0.7311320754716981,
      "grad_norm": 0.8656607812881103,
      "learning_rate": 1.5236729139878778e-06,
      "loss": 0.7746,
      "step": 310
    },
    {
      "epoch": 0.7334905660377359,
      "grad_norm": 0.8721224552709389,
      "learning_rate": 1.5203150001878353e-06,
      "loss": 0.7929,
      "step": 311
    },
    {
      "epoch": 0.7358490566037735,
      "grad_norm": 0.7856050732773205,
      "learning_rate": 1.5169490222164254e-06,
      "loss": 0.7733,
      "step": 312
    },
    {
      "epoch": 0.7382075471698113,
      "grad_norm": 0.8639502364522528,
      "learning_rate": 1.5135750322417066e-06,
      "loss": 0.7958,
      "step": 313
    },
    {
      "epoch": 0.7405660377358491,
      "grad_norm": 0.9986722246195658,
      "learning_rate": 1.5101930825559124e-06,
      "loss": 0.7944,
      "step": 314
    },
    {
      "epoch": 0.7429245283018868,
      "grad_norm": 0.8861549532629037,
      "learning_rate": 1.50680322557464e-06,
      "loss": 0.7887,
      "step": 315
    },
    {
      "epoch": 0.7452830188679245,
      "grad_norm": 0.8526619540985027,
      "learning_rate": 1.5034055138360398e-06,
      "loss": 0.7388,
      "step": 316
    },
    {
      "epoch": 0.7476415094339622,
      "grad_norm": 0.8760084110715984,
      "learning_rate": 1.5e-06,
      "loss": 0.7719,
      "step": 317
    },
    {
      "epoch": 0.75,
      "grad_norm": 0.8038573705056793,
      "learning_rate": 1.4965867368473306e-06,
      "loss": 0.7514,
      "step": 318
    },
    {
      "epoch": 0.75,
      "eval_loss": 0.6982797384262085,
      "eval_runtime": 83.2645,
      "eval_samples_per_second": 8.467,
      "eval_steps_per_second": 0.54,
      "step": 318
    },
    {
      "epoch": 0.7523584905660378,
      "grad_norm": 0.8193168846563599,
      "learning_rate": 1.4931657772789457e-06,
      "loss": 0.7834,
      "step": 319
    },
    {
      "epoch": 0.7547169811320755,
      "grad_norm": 0.9092282687501353,
      "learning_rate": 1.4897371743150423e-06,
      "loss": 0.753,
      "step": 320
    },
    {
      "epoch": 0.7570754716981132,
      "grad_norm": 0.944599516853919,
      "learning_rate": 1.4863009810942813e-06,
      "loss": 0.7855,
      "step": 321
    },
    {
      "epoch": 0.7594339622641509,
      "grad_norm": 0.9097780581691608,
      "learning_rate": 1.4828572508729606e-06,
      "loss": 0.7837,
      "step": 322
    },
    {
      "epoch": 0.7617924528301887,
      "grad_norm": 0.8635246603088246,
      "learning_rate": 1.479406037024192e-06,
      "loss": 0.7862,
      "step": 323
    },
    {
      "epoch": 0.7641509433962265,
      "grad_norm": 0.8041740773336183,
      "learning_rate": 1.4759473930370736e-06,
      "loss": 0.7705,
      "step": 324
    },
    {
      "epoch": 0.7665094339622641,
      "grad_norm": 0.8563734416095982,
      "learning_rate": 1.4724813725158596e-06,
      "loss": 0.7944,
      "step": 325
    },
    {
      "epoch": 0.7688679245283019,
      "grad_norm": 0.7885016820225573,
      "learning_rate": 1.4690080291791312e-06,
      "loss": 0.78,
      "step": 326
    },
    {
      "epoch": 0.7712264150943396,
      "grad_norm": 0.9769999602785767,
      "learning_rate": 1.4655274168589633e-06,
      "loss": 0.7506,
      "step": 327
    },
    {
      "epoch": 0.7735849056603774,
      "grad_norm": 0.8241115466345136,
      "learning_rate": 1.462039589500089e-06,
      "loss": 0.7826,
      "step": 328
    },
    {
      "epoch": 0.7759433962264151,
      "grad_norm": 0.8261080001645498,
      "learning_rate": 1.4585446011590658e-06,
      "loss": 0.7592,
      "step": 329
    },
    {
      "epoch": 0.7783018867924528,
      "grad_norm": 1.1493611817217861,
      "learning_rate": 1.4550425060034365e-06,
      "loss": 0.7674,
      "step": 330
    },
    {
      "epoch": 0.7806603773584906,
      "grad_norm": 0.8343201494559767,
      "learning_rate": 1.4515333583108893e-06,
      "loss": 0.8072,
      "step": 331
    },
    {
      "epoch": 0.7830188679245284,
      "grad_norm": 1.0664747829909942,
      "learning_rate": 1.4480172124684172e-06,
      "loss": 0.7802,
      "step": 332
    },
    {
      "epoch": 0.785377358490566,
      "grad_norm": 0.7792110149695123,
      "learning_rate": 1.4444941229714758e-06,
      "loss": 0.7704,
      "step": 333
    },
    {
      "epoch": 0.7877358490566038,
      "grad_norm": 0.8718279852088779,
      "learning_rate": 1.440964144423136e-06,
      "loss": 0.7963,
      "step": 334
    },
    {
      "epoch": 0.7900943396226415,
      "grad_norm": 1.02769301319547,
      "learning_rate": 1.4374273315332415e-06,
      "loss": 0.7863,
      "step": 335
    },
    {
      "epoch": 0.7924528301886793,
      "grad_norm": 0.7843647199877115,
      "learning_rate": 1.433883739117558e-06,
      "loss": 0.7705,
      "step": 336
    },
    {
      "epoch": 0.7948113207547169,
      "grad_norm": 0.815818120801617,
      "learning_rate": 1.430333422096925e-06,
      "loss": 0.7651,
      "step": 337
    },
    {
      "epoch": 0.7971698113207547,
      "grad_norm": 0.7835815188776669,
      "learning_rate": 1.4267764354964037e-06,
      "loss": 0.789,
      "step": 338
    },
| { | |
| "epoch": 0.7995283018867925, | |
| "grad_norm": 0.8382956450308263, | |
| "learning_rate": 1.423212834444425e-06, | |
| "loss": 0.817, | |
| "step": 339 | |
| }, | |
| { | |
| "epoch": 0.8018867924528302, | |
| "grad_norm": 0.9606287069411898, | |
| "learning_rate": 1.4196426741719345e-06, | |
| "loss": 0.7801, | |
| "step": 340 | |
| }, | |
| { | |
| "epoch": 0.8042452830188679, | |
| "grad_norm": 0.7833416575809102, | |
| "learning_rate": 1.4160660100115373e-06, | |
| "loss": 0.8139, | |
| "step": 341 | |
| }, | |
| { | |
| "epoch": 0.8066037735849056, | |
| "grad_norm": 0.8112490810199738, | |
| "learning_rate": 1.4124828973966392e-06, | |
| "loss": 0.7951, | |
| "step": 342 | |
| }, | |
| { | |
| "epoch": 0.8089622641509434, | |
| "grad_norm": 0.8533925894384254, | |
| "learning_rate": 1.4088933918605887e-06, | |
| "loss": 0.7555, | |
| "step": 343 | |
| }, | |
| { | |
| "epoch": 0.8113207547169812, | |
| "grad_norm": 0.8917123790523681, | |
| "learning_rate": 1.405297549035816e-06, | |
| "loss": 0.7941, | |
| "step": 344 | |
| }, | |
| { | |
| "epoch": 0.8136792452830188, | |
| "grad_norm": 0.8690146736748873, | |
| "learning_rate": 1.4016954246529694e-06, | |
| "loss": 0.7966, | |
| "step": 345 | |
| }, | |
| { | |
| "epoch": 0.8160377358490566, | |
| "grad_norm": 0.8372555162768984, | |
| "learning_rate": 1.3980870745400544e-06, | |
| "loss": 0.7972, | |
| "step": 346 | |
| }, | |
| { | |
| "epoch": 0.8183962264150944, | |
| "grad_norm": 0.8356712411477044, | |
| "learning_rate": 1.3944725546215662e-06, | |
| "loss": 0.753, | |
| "step": 347 | |
| }, | |
| { | |
| "epoch": 0.8207547169811321, | |
| "grad_norm": 0.8366058600346219, | |
| "learning_rate": 1.3908519209176225e-06, | |
| "loss": 0.7813, | |
| "step": 348 | |
| }, | |
| { | |
| "epoch": 0.8231132075471698, | |
| "grad_norm": 0.8373524566281616, | |
| "learning_rate": 1.3872252295430986e-06, | |
| "loss": 0.7956, | |
| "step": 349 | |
| }, | |
| { | |
| "epoch": 0.8254716981132075, | |
| "grad_norm": 0.7833296789989939, | |
| "learning_rate": 1.3835925367067529e-06, | |
| "loss": 0.7449, | |
| "step": 350 | |
| }, | |
| { | |
| "epoch": 0.8278301886792453, | |
| "grad_norm": 0.7981265071211454, | |
| "learning_rate": 1.37995389871036e-06, | |
| "loss": 0.7756, | |
| "step": 351 | |
| }, | |
| { | |
| "epoch": 0.8301886792452831, | |
| "grad_norm": 0.8840540972881347, | |
| "learning_rate": 1.3763093719478357e-06, | |
| "loss": 0.7984, | |
| "step": 352 | |
| }, | |
| { | |
| "epoch": 0.8325471698113207, | |
| "grad_norm": 0.985313340190558, | |
| "learning_rate": 1.372659012904363e-06, | |
| "loss": 0.7821, | |
| "step": 353 | |
| }, | |
| { | |
| "epoch": 0.8349056603773585, | |
| "grad_norm": 0.8138177618630202, | |
| "learning_rate": 1.369002878155519e-06, | |
| "loss": 0.7618, | |
| "step": 354 | |
| }, | |
| { | |
| "epoch": 0.8372641509433962, | |
| "grad_norm": 0.7948448582342686, | |
| "learning_rate": 1.3653410243663951e-06, | |
| "loss": 0.7559, | |
| "step": 355 | |
| }, | |
| { | |
| "epoch": 0.839622641509434, | |
| "grad_norm": 0.8151246949343306, | |
| "learning_rate": 1.3616735082907196e-06, | |
| "loss": 0.7722, | |
| "step": 356 | |
| }, | |
| { | |
| "epoch": 0.8419811320754716, | |
| "grad_norm": 0.8162887157403834, | |
| "learning_rate": 1.35800038676998e-06, | |
| "loss": 0.7711, | |
| "step": 357 | |
| }, | |
| { | |
| "epoch": 0.8443396226415094, | |
| "grad_norm": 1.1322690706169278, | |
| "learning_rate": 1.3543217167325388e-06, | |
| "loss": 0.762, | |
| "step": 358 | |
| }, | |
| { | |
| "epoch": 0.8466981132075472, | |
| "grad_norm": 0.8102205098745584, | |
| "learning_rate": 1.3506375551927544e-06, | |
| "loss": 0.7532, | |
| "step": 359 | |
| }, | |
| { | |
| "epoch": 0.8490566037735849, | |
| "grad_norm": 0.9369994865443904, | |
| "learning_rate": 1.3469479592500951e-06, | |
| "loss": 0.737, | |
| "step": 360 | |
| }, | |
| { | |
| "epoch": 0.8514150943396226, | |
| "grad_norm": 0.9319609634032745, | |
| "learning_rate": 1.3432529860882556e-06, | |
| "loss": 0.8074, | |
| "step": 361 | |
| }, | |
| { | |
| "epoch": 0.8537735849056604, | |
| "grad_norm": 0.844911038169109, | |
| "learning_rate": 1.3395526929742691e-06, | |
| "loss": 0.7576, | |
| "step": 362 | |
| }, | |
| { | |
| "epoch": 0.8561320754716981, | |
| "grad_norm": 0.8213852435481517, | |
| "learning_rate": 1.3358471372576227e-06, | |
| "loss": 0.8091, | |
| "step": 363 | |
| }, | |
| { | |
| "epoch": 0.8584905660377359, | |
| "grad_norm": 0.8679917359639049, | |
| "learning_rate": 1.3321363763693643e-06, | |
| "loss": 0.7649, | |
| "step": 364 | |
| }, | |
| { | |
| "epoch": 0.8608490566037735, | |
| "grad_norm": 0.863728546344944, | |
| "learning_rate": 1.3284204678212167e-06, | |
| "loss": 0.7364, | |
| "step": 365 | |
| }, | |
| { | |
| "epoch": 0.8632075471698113, | |
| "grad_norm": 0.9151925878031607, | |
| "learning_rate": 1.3246994692046835e-06, | |
| "loss": 0.7617, | |
| "step": 366 | |
| }, | |
| { | |
| "epoch": 0.8655660377358491, | |
| "grad_norm": 0.7676651686086633, | |
| "learning_rate": 1.3209734381901578e-06, | |
| "loss": 0.7703, | |
| "step": 367 | |
| }, | |
| { | |
| "epoch": 0.8679245283018868, | |
| "grad_norm": 0.79154626898034, | |
| "learning_rate": 1.3172424325260272e-06, | |
| "loss": 0.7816, | |
| "step": 368 | |
| }, | |
| { | |
| "epoch": 0.8702830188679245, | |
| "grad_norm": 0.8831690649426513, | |
| "learning_rate": 1.3135065100377814e-06, | |
| "loss": 0.7646, | |
| "step": 369 | |
| }, | |
| { | |
| "epoch": 0.8726415094339622, | |
| "grad_norm": 0.8098189405568664, | |
| "learning_rate": 1.3097657286271116e-06, | |
| "loss": 0.7673, | |
| "step": 370 | |
| }, | |
| { | |
| "epoch": 0.875, | |
| "grad_norm": 0.8071739591609438, | |
| "learning_rate": 1.3060201462710176e-06, | |
| "loss": 0.7438, | |
| "step": 371 | |
| }, | |
| { | |
| "epoch": 0.8773584905660378, | |
| "grad_norm": 0.8864737282217798, | |
| "learning_rate": 1.3022698210209066e-06, | |
| "loss": 0.7393, | |
| "step": 372 | |
| }, | |
| { | |
| "epoch": 0.8797169811320755, | |
| "grad_norm": 0.8193555244410853, | |
| "learning_rate": 1.2985148110016947e-06, | |
| "loss": 0.7658, | |
| "step": 373 | |
| }, | |
| { | |
| "epoch": 0.8820754716981132, | |
| "grad_norm": 0.7819668232955883, | |
| "learning_rate": 1.2947551744109043e-06, | |
| "loss": 0.7876, | |
| "step": 374 | |
| }, | |
| { | |
| "epoch": 0.8844339622641509, | |
| "grad_norm": 0.9661764152661253, | |
| "learning_rate": 1.2909909695177645e-06, | |
| "loss": 0.7556, | |
| "step": 375 | |
| }, | |
| { | |
| "epoch": 0.8867924528301887, | |
| "grad_norm": 0.7916178501632968, | |
| "learning_rate": 1.2872222546623065e-06, | |
| "loss": 0.7776, | |
| "step": 376 | |
| }, | |
| { | |
| "epoch": 0.8891509433962265, | |
| "grad_norm": 0.908391562439544, | |
| "learning_rate": 1.2834490882544598e-06, | |
| "loss": 0.7604, | |
| "step": 377 | |
| }, | |
| { | |
| "epoch": 0.8915094339622641, | |
| "grad_norm": 0.7673617521747667, | |
| "learning_rate": 1.2796715287731461e-06, | |
| "loss": 0.7786, | |
| "step": 378 | |
| }, | |
| { | |
| "epoch": 0.8938679245283019, | |
| "grad_norm": 1.1737766347467777, | |
| "learning_rate": 1.2758896347653752e-06, | |
| "loss": 0.7692, | |
| "step": 379 | |
| }, | |
| { | |
| "epoch": 0.8962264150943396, | |
| "grad_norm": 0.7823714073715833, | |
| "learning_rate": 1.272103464845335e-06, | |
| "loss": 0.7389, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 0.8985849056603774, | |
| "grad_norm": 1.0030453246218272, | |
| "learning_rate": 1.2683130776934848e-06, | |
| "loss": 0.7556, | |
| "step": 381 | |
| }, | |
| { | |
| "epoch": 0.9009433962264151, | |
| "grad_norm": 0.9688917314182516, | |
| "learning_rate": 1.2645185320556444e-06, | |
| "loss": 0.7415, | |
| "step": 382 | |
| }, | |
| { | |
| "epoch": 0.9033018867924528, | |
| "grad_norm": 0.8539895113482738, | |
| "learning_rate": 1.2607198867420858e-06, | |
| "loss": 0.7772, | |
| "step": 383 | |
| }, | |
| { | |
| "epoch": 0.9056603773584906, | |
| "grad_norm": 0.8011530846319137, | |
| "learning_rate": 1.256917200626619e-06, | |
| "loss": 0.7426, | |
| "step": 384 | |
| }, | |
| { | |
| "epoch": 0.9080188679245284, | |
| "grad_norm": 0.9859270894449156, | |
| "learning_rate": 1.253110532645682e-06, | |
| "loss": 0.7942, | |
| "step": 385 | |
| }, | |
| { | |
| "epoch": 0.910377358490566, | |
| "grad_norm": 0.7970067775774904, | |
| "learning_rate": 1.2492999417974253e-06, | |
| "loss": 0.7914, | |
| "step": 386 | |
| }, | |
| { | |
| "epoch": 0.9127358490566038, | |
| "grad_norm": 0.9049836680757578, | |
| "learning_rate": 1.245485487140799e-06, | |
| "loss": 0.7787, | |
| "step": 387 | |
| }, | |
| { | |
| "epoch": 0.9150943396226415, | |
| "grad_norm": 0.866322963165458, | |
| "learning_rate": 1.2416672277946373e-06, | |
| "loss": 0.781, | |
| "step": 388 | |
| }, | |
| { | |
| "epoch": 0.9174528301886793, | |
| "grad_norm": 0.9269306886834423, | |
| "learning_rate": 1.2378452229367413e-06, | |
| "loss": 0.808, | |
| "step": 389 | |
| }, | |
| { | |
| "epoch": 0.9198113207547169, | |
| "grad_norm": 0.8286820002413788, | |
| "learning_rate": 1.2340195318029622e-06, | |
| "loss": 0.7623, | |
| "step": 390 | |
| }, | |
| { | |
| "epoch": 0.9221698113207547, | |
| "grad_norm": 0.8143258703482895, | |
| "learning_rate": 1.2301902136862848e-06, | |
| "loss": 0.7965, | |
| "step": 391 | |
| }, | |
| { | |
| "epoch": 0.9245283018867925, | |
| "grad_norm": 0.8041175896213196, | |
| "learning_rate": 1.2263573279359053e-06, | |
| "loss": 0.7613, | |
| "step": 392 | |
| }, | |
| { | |
| "epoch": 0.9268867924528302, | |
| "grad_norm": 0.9087369855701409, | |
| "learning_rate": 1.2225209339563143e-06, | |
| "loss": 0.7669, | |
| "step": 393 | |
| }, | |
| { | |
| "epoch": 0.9292452830188679, | |
| "grad_norm": 0.8126237661274147, | |
| "learning_rate": 1.2186810912063758e-06, | |
| "loss": 0.7531, | |
| "step": 394 | |
| }, | |
| { | |
| "epoch": 0.9316037735849056, | |
| "grad_norm": 0.8354891878817764, | |
| "learning_rate": 1.214837859198404e-06, | |
| "loss": 0.768, | |
| "step": 395 | |
| }, | |
| { | |
| "epoch": 0.9339622641509434, | |
| "grad_norm": 0.7793219061787805, | |
| "learning_rate": 1.2109912974972422e-06, | |
| "loss": 0.7831, | |
| "step": 396 | |
| }, | |
| { | |
| "epoch": 0.9363207547169812, | |
| "grad_norm": 0.7813175142444524, | |
| "learning_rate": 1.2071414657193396e-06, | |
| "loss": 0.7671, | |
| "step": 397 | |
| }, | |
| { | |
| "epoch": 0.9386792452830188, | |
| "grad_norm": 0.8960835581240535, | |
| "learning_rate": 1.2032884235318268e-06, | |
| "loss": 0.775, | |
| "step": 398 | |
| }, | |
| { | |
| "epoch": 0.9410377358490566, | |
| "grad_norm": 0.8773817072709057, | |
| "learning_rate": 1.1994322306515925e-06, | |
| "loss": 0.7915, | |
| "step": 399 | |
| }, | |
| { | |
| "epoch": 0.9433962264150944, | |
| "grad_norm": 0.8191284697051531, | |
| "learning_rate": 1.1955729468443546e-06, | |
| "loss": 0.7548, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 0.9457547169811321, | |
| "grad_norm": 0.8704070228218153, | |
| "learning_rate": 1.1917106319237384e-06, | |
| "loss": 0.773, | |
| "step": 401 | |
| }, | |
| { | |
| "epoch": 0.9481132075471698, | |
| "grad_norm": 0.8119790656590767, | |
| "learning_rate": 1.1878453457503464e-06, | |
| "loss": 0.7933, | |
| "step": 402 | |
| }, | |
| { | |
| "epoch": 0.9504716981132075, | |
| "grad_norm": 0.8290570173312977, | |
| "learning_rate": 1.1839771482308308e-06, | |
| "loss": 0.7515, | |
| "step": 403 | |
| }, | |
| { | |
| "epoch": 0.9528301886792453, | |
| "grad_norm": 0.7808059259378133, | |
| "learning_rate": 1.1801060993169666e-06, | |
| "loss": 0.771, | |
| "step": 404 | |
| }, | |
| { | |
| "epoch": 0.9551886792452831, | |
| "grad_norm": 0.8554505156061943, | |
| "learning_rate": 1.1762322590047219e-06, | |
| "loss": 0.767, | |
| "step": 405 | |
| }, | |
| { | |
| "epoch": 0.9575471698113207, | |
| "grad_norm": 0.8132652328205165, | |
| "learning_rate": 1.172355687333326e-06, | |
| "loss": 0.7467, | |
| "step": 406 | |
| }, | |
| { | |
| "epoch": 0.9599056603773585, | |
| "grad_norm": 0.7767004491954446, | |
| "learning_rate": 1.168476444384342e-06, | |
| "loss": 0.7667, | |
| "step": 407 | |
| }, | |
| { | |
| "epoch": 0.9622641509433962, | |
| "grad_norm": 0.8619052365176207, | |
| "learning_rate": 1.164594590280734e-06, | |
| "loss": 0.7993, | |
| "step": 408 | |
| }, | |
| { | |
| "epoch": 0.964622641509434, | |
| "grad_norm": 0.8115144668833016, | |
| "learning_rate": 1.1607101851859345e-06, | |
| "loss": 0.7787, | |
| "step": 409 | |
| }, | |
| { | |
| "epoch": 0.9669811320754716, | |
| "grad_norm": 0.8000429071733963, | |
| "learning_rate": 1.156823289302914e-06, | |
| "loss": 0.7767, | |
| "step": 410 | |
| }, | |
| { | |
| "epoch": 0.9693396226415094, | |
| "grad_norm": 0.8228751842965996, | |
| "learning_rate": 1.152933962873246e-06, | |
| "loss": 0.7829, | |
| "step": 411 | |
| }, | |
| { | |
| "epoch": 0.9716981132075472, | |
| "grad_norm": 0.8685312714228164, | |
| "learning_rate": 1.1490422661761743e-06, | |
| "loss": 0.7775, | |
| "step": 412 | |
| }, | |
| { | |
| "epoch": 0.9740566037735849, | |
| "grad_norm": 0.9383805515060808, | |
| "learning_rate": 1.1451482595276796e-06, | |
| "loss": 0.7716, | |
| "step": 413 | |
| }, | |
| { | |
| "epoch": 0.9764150943396226, | |
| "grad_norm": 0.978907741116596, | |
| "learning_rate": 1.1412520032795419e-06, | |
| "loss": 0.79, | |
| "step": 414 | |
| }, | |
| { | |
| "epoch": 0.9787735849056604, | |
| "grad_norm": 0.936909408413621, | |
| "learning_rate": 1.1373535578184082e-06, | |
| "loss": 0.7463, | |
| "step": 415 | |
| }, | |
| { | |
| "epoch": 0.9811320754716981, | |
| "grad_norm": 0.7779360289097986, | |
| "learning_rate": 1.1334529835648552e-06, | |
| "loss": 0.7544, | |
| "step": 416 | |
| }, | |
| { | |
| "epoch": 0.9834905660377359, | |
| "grad_norm": 0.8442404415567627, | |
| "learning_rate": 1.1295503409724525e-06, | |
| "loss": 0.7968, | |
| "step": 417 | |
| }, | |
| { | |
| "epoch": 0.9858490566037735, | |
| "grad_norm": 0.8856697438669298, | |
| "learning_rate": 1.1256456905268263e-06, | |
| "loss": 0.7622, | |
| "step": 418 | |
| }, | |
| { | |
| "epoch": 0.9882075471698113, | |
| "grad_norm": 0.8362007813195715, | |
| "learning_rate": 1.1217390927447225e-06, | |
| "loss": 0.7475, | |
| "step": 419 | |
| }, | |
| { | |
| "epoch": 0.9905660377358491, | |
| "grad_norm": 0.7824485352158383, | |
| "learning_rate": 1.1178306081730664e-06, | |
| "loss": 0.7705, | |
| "step": 420 | |
| }, | |
| { | |
| "epoch": 0.9929245283018868, | |
| "grad_norm": 0.7992205810856007, | |
| "learning_rate": 1.113920297388028e-06, | |
| "loss": 0.7545, | |
| "step": 421 | |
| }, | |
| { | |
| "epoch": 0.9952830188679245, | |
| "grad_norm": 0.785520038953663, | |
| "learning_rate": 1.1100082209940793e-06, | |
| "loss": 0.7647, | |
| "step": 422 | |
| }, | |
| { | |
| "epoch": 0.9976415094339622, | |
| "grad_norm": 0.8327122327201606, | |
| "learning_rate": 1.106094439623058e-06, | |
| "loss": 0.7632, | |
| "step": 423 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "grad_norm": 0.7736149741657676, | |
| "learning_rate": 1.1021790139332264e-06, | |
| "loss": 0.7861, | |
| "step": 424 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "eval_loss": 0.6892534494400024, | |
| "eval_runtime": 81.9429, | |
| "eval_samples_per_second": 8.604, | |
| "eval_steps_per_second": 0.549, | |
| "step": 424 | |
| }, | |
| { | |
| "epoch": 1.0023584905660377, | |
| "grad_norm": 0.8035685998252425, | |
| "learning_rate": 1.0982620046083305e-06, | |
| "loss": 0.7506, | |
| "step": 425 | |
| }, | |
| { | |
| "epoch": 1.0047169811320755, | |
| "grad_norm": 1.001273686570143, | |
| "learning_rate": 1.0943434723566623e-06, | |
| "loss": 0.783, | |
| "step": 426 | |
| }, | |
| { | |
| "epoch": 1.0070754716981132, | |
| "grad_norm": 0.7620350953234064, | |
| "learning_rate": 1.0904234779101158e-06, | |
| "loss": 0.7436, | |
| "step": 427 | |
| }, | |
| { | |
| "epoch": 1.009433962264151, | |
| "grad_norm": 0.8051824532909724, | |
| "learning_rate": 1.0865020820232466e-06, | |
| "loss": 0.7408, | |
| "step": 428 | |
| }, | |
| { | |
| "epoch": 1.0117924528301887, | |
| "grad_norm": 1.1441709749307019, | |
| "learning_rate": 1.0825793454723324e-06, | |
| "loss": 0.7619, | |
| "step": 429 | |
| }, | |
| { | |
| "epoch": 1.0141509433962264, | |
| "grad_norm": 0.934287576351282, | |
| "learning_rate": 1.0786553290544278e-06, | |
| "loss": 0.7291, | |
| "step": 430 | |
| }, | |
| { | |
| "epoch": 1.0165094339622642, | |
| "grad_norm": 0.867146390883491, | |
| "learning_rate": 1.0747300935864243e-06, | |
| "loss": 0.7697, | |
| "step": 431 | |
| }, | |
| { | |
| "epoch": 1.0188679245283019, | |
| "grad_norm": 0.7934743429540405, | |
| "learning_rate": 1.070803699904107e-06, | |
| "loss": 0.7774, | |
| "step": 432 | |
| }, | |
| { | |
| "epoch": 1.0212264150943395, | |
| "grad_norm": 1.204890281675565, | |
| "learning_rate": 1.0668762088612114e-06, | |
| "loss": 0.7659, | |
| "step": 433 | |
| }, | |
| { | |
| "epoch": 1.0235849056603774, | |
| "grad_norm": 0.7748660244184836, | |
| "learning_rate": 1.0629476813284807e-06, | |
| "loss": 0.7812, | |
| "step": 434 | |
| }, | |
| { | |
| "epoch": 1.025943396226415, | |
| "grad_norm": 0.8559552505505363, | |
| "learning_rate": 1.0590181781927227e-06, | |
| "loss": 0.7641, | |
| "step": 435 | |
| }, | |
| { | |
| "epoch": 1.028301886792453, | |
| "grad_norm": 0.8278708288686452, | |
| "learning_rate": 1.0550877603558654e-06, | |
| "loss": 0.7339, | |
| "step": 436 | |
| }, | |
| { | |
| "epoch": 1.0306603773584906, | |
| "grad_norm": 0.8199114258570241, | |
| "learning_rate": 1.0511564887340135e-06, | |
| "loss": 0.7884, | |
| "step": 437 | |
| }, | |
| { | |
| "epoch": 1.0330188679245282, | |
| "grad_norm": 0.8220578039981541, | |
| "learning_rate": 1.0472244242565034e-06, | |
| "loss": 0.7636, | |
| "step": 438 | |
| }, | |
| { | |
| "epoch": 1.0353773584905661, | |
| "grad_norm": 0.9899659514811034, | |
| "learning_rate": 1.043291627864961e-06, | |
| "loss": 0.7507, | |
| "step": 439 | |
| }, | |
| { | |
| "epoch": 1.0377358490566038, | |
| "grad_norm": 0.9164260231900845, | |
| "learning_rate": 1.0393581605123552e-06, | |
| "loss": 0.7604, | |
| "step": 440 | |
| }, | |
| { | |
| "epoch": 1.0400943396226414, | |
| "grad_norm": 0.780514004370331, | |
| "learning_rate": 1.035424083162054e-06, | |
| "loss": 0.7661, | |
| "step": 441 | |
| }, | |
| { | |
| "epoch": 1.0424528301886793, | |
| "grad_norm": 0.9654942156178127, | |
| "learning_rate": 1.031489456786879e-06, | |
| "loss": 0.732, | |
| "step": 442 | |
| }, | |
| { | |
| "epoch": 1.044811320754717, | |
| "grad_norm": 0.9028629851443349, | |
| "learning_rate": 1.0275543423681621e-06, | |
| "loss": 0.7704, | |
| "step": 443 | |
| }, | |
| { | |
| "epoch": 1.0471698113207548, | |
| "grad_norm": 0.8997990850767794, | |
| "learning_rate": 1.0236188008947978e-06, | |
| "loss": 0.7832, | |
| "step": 444 | |
| }, | |
| { | |
| "epoch": 1.0495283018867925, | |
| "grad_norm": 0.8380399486601815, | |
| "learning_rate": 1.0196828933623008e-06, | |
| "loss": 0.7513, | |
| "step": 445 | |
| }, | |
| { | |
| "epoch": 1.0518867924528301, | |
| "grad_norm": 0.818805158462816, | |
| "learning_rate": 1.0157466807718577e-06, | |
| "loss": 0.7595, | |
| "step": 446 | |
| }, | |
| { | |
| "epoch": 1.054245283018868, | |
| "grad_norm": 0.8325448461017404, | |
| "learning_rate": 1.0118102241293847e-06, | |
| "loss": 0.7634, | |
| "step": 447 | |
| }, | |
| { | |
| "epoch": 1.0566037735849056, | |
| "grad_norm": 0.7936938025099423, | |
| "learning_rate": 1.0078735844445788e-06, | |
| "loss": 0.768, | |
| "step": 448 | |
| }, | |
| { | |
| "epoch": 1.0589622641509433, | |
| "grad_norm": 0.8378311364099181, | |
| "learning_rate": 1.0039368227299753e-06, | |
| "loss": 0.7599, | |
| "step": 449 | |
| }, | |
| { | |
| "epoch": 1.0613207547169812, | |
| "grad_norm": 0.8299911067496856, | |
| "learning_rate": 1e-06, | |
| "loss": 0.7488, | |
| "step": 450 | |
| }, | |
| { | |
| "epoch": 1.0636792452830188, | |
| "grad_norm": 0.8677844259185759, | |
| "learning_rate": 9.960631772700248e-07, | |
| "loss": 0.7698, | |
| "step": 451 | |
| }, | |
| { | |
| "epoch": 1.0660377358490567, | |
| "grad_norm": 0.8463491375440975, | |
| "learning_rate": 9.92126415555421e-07, | |
| "loss": 0.7493, | |
| "step": 452 | |
| }, | |
| { | |
| "epoch": 1.0683962264150944, | |
| "grad_norm": 0.8004441596711467, | |
| "learning_rate": 9.881897758706154e-07, | |
| "loss": 0.7366, | |
| "step": 453 | |
| }, | |
| { | |
| "epoch": 1.070754716981132, | |
| "grad_norm": 1.2321632539336993, | |
| "learning_rate": 9.842533192281422e-07, | |
| "loss": 0.7481, | |
| "step": 454 | |
| }, | |
| { | |
| "epoch": 1.0731132075471699, | |
| "grad_norm": 0.8194799035445371, | |
| "learning_rate": 9.803171066376995e-07, | |
| "loss": 0.7834, | |
| "step": 455 | |
| }, | |
| { | |
| "epoch": 1.0754716981132075, | |
| "grad_norm": 0.7953471718099565, | |
| "learning_rate": 9.763811991052019e-07, | |
| "loss": 0.7508, | |
| "step": 456 | |
| }, | |
| { | |
| "epoch": 1.0778301886792452, | |
| "grad_norm": 0.8500817934393053, | |
| "learning_rate": 9.72445657631838e-07, | |
| "loss": 0.7639, | |
| "step": 457 | |
| }, | |
| { | |
| "epoch": 1.080188679245283, | |
| "grad_norm": 0.8053711636082244, | |
| "learning_rate": 9.68510543213121e-07, | |
| "loss": 0.7546, | |
| "step": 458 | |
| }, | |
| { | |
| "epoch": 1.0825471698113207, | |
| "grad_norm": 1.1272698654628237, | |
| "learning_rate": 9.645759168379461e-07, | |
| "loss": 0.735, | |
| "step": 459 | |
| }, | |
| { | |
| "epoch": 1.0849056603773586, | |
| "grad_norm": 0.7772007598787661, | |
| "learning_rate": 9.606418394876447e-07, | |
| "loss": 0.747, | |
| "step": 460 | |
| }, | |
| { | |
| "epoch": 1.0872641509433962, | |
| "grad_norm": 0.8414360815423256, | |
| "learning_rate": 9.567083721350388e-07, | |
| "loss": 0.7411, | |
| "step": 461 | |
| }, | |
| { | |
| "epoch": 1.0896226415094339, | |
| "grad_norm": 0.7791267607388218, | |
| "learning_rate": 9.527755757434966e-07, | |
| "loss": 0.7436, | |
| "step": 462 | |
| }, | |
| { | |
| "epoch": 1.0919811320754718, | |
| "grad_norm": 0.80528466203603, | |
| "learning_rate": 9.48843511265987e-07, | |
| "loss": 0.7656, | |
| "step": 463 | |
| }, | |
| { | |
| "epoch": 1.0943396226415094, | |
| "grad_norm": 0.7758351013188943, | |
| "learning_rate": 9.449122396441343e-07, | |
| "loss": 0.737, | |
| "step": 464 | |
| }, | |
| { | |
| "epoch": 1.0966981132075473, | |
| "grad_norm": 1.2494162832647924, | |
| "learning_rate": 9.409818218072772e-07, | |
| "loss": 0.7613, | |
| "step": 465 | |
| }, | |
| { | |
| "epoch": 1.099056603773585, | |
| "grad_norm": 0.8049303948390758, | |
| "learning_rate": 9.370523186715194e-07, | |
| "loss": 0.7628, | |
| "step": 466 | |
| }, | |
| { | |
| "epoch": 1.1014150943396226, | |
| "grad_norm": 0.7896423211268341, | |
| "learning_rate": 9.331237911387888e-07, | |
| "loss": 0.7301, | |
| "step": 467 | |
| }, | |
| { | |
| "epoch": 1.1037735849056605, | |
| "grad_norm": 0.7947232451183359, | |
| "learning_rate": 9.291963000958931e-07, | |
| "loss": 0.7803, | |
| "step": 468 | |
| }, | |
| { | |
| "epoch": 1.1061320754716981, | |
| "grad_norm": 0.8128944094386984, | |
| "learning_rate": 9.252699064135758e-07, | |
| "loss": 0.7693, | |
| "step": 469 | |
| }, | |
| { | |
| "epoch": 1.1084905660377358, | |
| "grad_norm": 0.9181658602259837, | |
| "learning_rate": 9.213446709455722e-07, | |
| "loss": 0.7794, | |
| "step": 470 | |
| }, | |
| { | |
| "epoch": 1.1108490566037736, | |
| "grad_norm": 0.753282034988688, | |
| "learning_rate": 9.174206545276677e-07, | |
| "loss": 0.7383, | |
| "step": 471 | |
| }, | |
| { | |
| "epoch": 1.1132075471698113, | |
| "grad_norm": 0.8890774956158611, | |
| "learning_rate": 9.134979179767532e-07, | |
| "loss": 0.7528, | |
| "step": 472 | |
| }, | |
| { | |
| "epoch": 1.115566037735849, | |
| "grad_norm": 0.9503365963029163, | |
| "learning_rate": 9.095765220898843e-07, | |
| "loss": 0.7757, | |
| "step": 473 | |
| }, | |
| { | |
| "epoch": 1.1179245283018868, | |
| "grad_norm": 0.9284151146362815, | |
| "learning_rate": 9.056565276433377e-07, | |
| "loss": 0.7442, | |
| "step": 474 | |
| }, | |
| { | |
| "epoch": 1.1202830188679245, | |
| "grad_norm": 0.9704301526052668, | |
| "learning_rate": 9.017379953916695e-07, | |
| "loss": 0.7417, | |
| "step": 475 | |
| }, | |
| { | |
| "epoch": 1.1226415094339623, | |
| "grad_norm": 0.8572079632472874, | |
| "learning_rate": 8.978209860667738e-07, | |
| "loss": 0.7724, | |
| "step": 476 | |
| }, | |
| { | |
| "epoch": 1.125, | |
| "grad_norm": 0.7912726011032718, | |
| "learning_rate": 8.93905560376942e-07, | |
| "loss": 0.7741, | |
| "step": 477 | |
| }, | |
| { | |
| "epoch": 1.1273584905660377, | |
| "grad_norm": 0.7762605349979439, | |
| "learning_rate": 8.899917790059207e-07, | |
| "loss": 0.7211, | |
| "step": 478 | |
| }, | |
| { | |
| "epoch": 1.1297169811320755, | |
| "grad_norm": 0.8625085844504403, | |
| "learning_rate": 8.860797026119721e-07, | |
| "loss": 0.7327, | |
| "step": 479 | |
| }, | |
| { | |
| "epoch": 1.1320754716981132, | |
| "grad_norm": 0.8012017130865744, | |
| "learning_rate": 8.821693918269333e-07, | |
| "loss": 0.7293, | |
| "step": 480 | |
| }, | |
| { | |
| "epoch": 1.134433962264151, | |
| "grad_norm": 0.7832218701492647, | |
| "learning_rate": 8.782609072552776e-07, | |
| "loss": 0.7584, | |
| "step": 481 | |
| }, | |
| { | |
| "epoch": 1.1367924528301887, | |
| "grad_norm": 0.8223365605395953, | |
| "learning_rate": 8.743543094731737e-07, | |
| "loss": 0.757, | |
| "step": 482 | |
| }, | |
| { | |
| "epoch": 1.1391509433962264, | |
| "grad_norm": 0.769269936037437, | |
| "learning_rate": 8.704496590275477e-07, | |
| "loss": 0.7265, | |
| "step": 483 | |
| }, | |
| { | |
| "epoch": 1.1415094339622642, | |
| "grad_norm": 0.8470727703331676, | |
| "learning_rate": 8.665470164351447e-07, | |
| "loss": 0.7446, | |
| "step": 484 | |
| }, | |
| { | |
| "epoch": 1.1438679245283019, | |
| "grad_norm": 0.8609116334367811, | |
| "learning_rate": 8.626464421815918e-07, | |
| "loss": 0.7357, | |
| "step": 485 | |
| }, | |
| { | |
| "epoch": 1.1462264150943395, | |
| "grad_norm": 0.7644474626055862, | |
| "learning_rate": 8.587479967204582e-07, | |
| "loss": 0.7502, | |
| "step": 486 | |
| }, | |
| { | |
| "epoch": 1.1485849056603774, | |
| "grad_norm": 0.7974217428241436, | |
| "learning_rate": 8.548517404723206e-07, | |
| "loss": 0.7255, | |
| "step": 487 | |
| }, | |
| { | |
| "epoch": 1.150943396226415, | |
| "grad_norm": 0.8124702999828396, | |
| "learning_rate": 8.509577338238254e-07, | |
| "loss": 0.766, | |
| "step": 488 | |
| }, | |
| { | |
| "epoch": 1.1533018867924527, | |
| "grad_norm": 0.8292245412487236, | |
| "learning_rate": 8.47066037126754e-07, | |
| "loss": 0.7302, | |
| "step": 489 | |
| }, | |
| { | |
| "epoch": 1.1556603773584906, | |
| "grad_norm": 0.819810501024351, | |
| "learning_rate": 8.43176710697086e-07, | |
| "loss": 0.7163, | |
| "step": 490 | |
| }, | |
| { | |
| "epoch": 1.1580188679245282, | |
| "grad_norm": 0.8385443524556274, | |
| "learning_rate": 8.392898148140656e-07, | |
| "loss": 0.7465, | |
| "step": 491 | |
| }, | |
| { | |
| "epoch": 1.1603773584905661, | |
| "grad_norm": 0.778726751410171, | |
| "learning_rate": 8.354054097192659e-07, | |
| "loss": 0.7758, | |
| "step": 492 | |
| }, | |
| { | |
| "epoch": 1.1627358490566038, | |
| "grad_norm": 0.788415584362694, | |
| "learning_rate": 8.315235556156579e-07, | |
| "loss": 0.7498, | |
| "step": 493 | |
| }, | |
| { | |
| "epoch": 1.1650943396226414, | |
| "grad_norm": 0.7703907755533744, | |
| "learning_rate": 8.276443126666742e-07, | |
| "loss": 0.7276, | |
| "step": 494 | |
| }, | |
| { | |
| "epoch": 1.1674528301886793, | |
| "grad_norm": 0.8704987882807484, | |
| "learning_rate": 8.237677409952784e-07, | |
| "loss": 0.766, | |
| "step": 495 | |
| }, | |
| { | |
| "epoch": 1.169811320754717, | |
| "grad_norm": 0.7948667823022109, | |
| "learning_rate": 8.198939006830333e-07, | |
| "loss": 0.7656, | |
| "step": 496 | |
| }, | |
| { | |
| "epoch": 1.1721698113207548, | |
| "grad_norm": 0.8090640349585008, | |
| "learning_rate": 8.160228517691692e-07, | |
| "loss": 0.7257, | |
| "step": 497 | |
| }, | |
| { | |
| "epoch": 1.1745283018867925, | |
| "grad_norm": 1.159992161506865, | |
| "learning_rate": 8.121546542496538e-07, | |
| "loss": 0.7593, | |
| "step": 498 | |
| }, | |
| { | |
| "epoch": 1.1768867924528301, | |
| "grad_norm": 0.8230774285047441, | |
| "learning_rate": 8.082893680762618e-07, | |
| "loss": 0.7511, | |
| "step": 499 | |
| }, | |
| { | |
| "epoch": 1.179245283018868, | |
| "grad_norm": 0.8682718782355334, | |
| "learning_rate": 8.044270531556452e-07, | |
| "loss": 0.7647, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 1.1816037735849056, | |
| "grad_norm": 0.8274347964467509, | |
| "learning_rate": 8.005677693484076e-07, | |
| "loss": 0.7536, | |
| "step": 501 | |
| }, | |
| { | |
| "epoch": 1.1839622641509433, | |
| "grad_norm": 0.8334334904558071, | |
| "learning_rate": 7.967115764681731e-07, | |
| "loss": 0.782, | |
| "step": 502 | |
| }, | |
| { | |
| "epoch": 1.1863207547169812, | |
| "grad_norm": 0.8985376970213992, | |
| "learning_rate": 7.928585342806607e-07, | |
| "loss": 0.7461, | |
| "step": 503 | |
| }, | |
| { | |
| "epoch": 1.1886792452830188, | |
| "grad_norm": 0.7846783242941104, | |
| "learning_rate": 7.890087025027579e-07, | |
| "loss": 0.7461, | |
| "step": 504 | |
| }, | |
| { | |
| "epoch": 1.1910377358490567, | |
| "grad_norm": 0.800897718044774, | |
| "learning_rate": 7.85162140801596e-07, | |
| "loss": 0.7525, | |
| "step": 505 | |
| }, | |
| { | |
| "epoch": 1.1933962264150944, | |
| "grad_norm": 0.7742939894445084, | |
| "learning_rate": 7.813189087936242e-07, | |
| "loss": 0.7082, | |
| "step": 506 | |
| }, | |
| { | |
| "epoch": 1.195754716981132, | |
| "grad_norm": 0.8945053331857878, | |
| "learning_rate": 7.774790660436857e-07, | |
| "loss": 0.7328, | |
| "step": 507 | |
| }, | |
| { | |
| "epoch": 1.1981132075471699, | |
| "grad_norm": 0.9098394537893619, | |
| "learning_rate": 7.736426720640948e-07, | |
| "loss": 0.7802, | |
| "step": 508 | |
| }, | |
| { | |
| "epoch": 1.2004716981132075, | |
| "grad_norm": 0.7969225975663269, | |
| "learning_rate": 7.698097863137152e-07, | |
| "loss": 0.7423, | |
| "step": 509 | |
| }, | |
| { | |
| "epoch": 1.2028301886792452, | |
| "grad_norm": 0.8867765640390939, | |
| "learning_rate": 7.659804681970377e-07, | |
| "loss": 0.7443, | |
| "step": 510 | |
| }, | |
| { | |
| "epoch": 1.205188679245283, | |
| "grad_norm": 0.8936277250989298, | |
| "learning_rate": 7.621547770632589e-07, | |
| "loss": 0.7446, | |
| "step": 511 | |
| }, | |
| { | |
| "epoch": 1.2075471698113207, | |
| "grad_norm": 0.7650448661091079, | |
| "learning_rate": 7.583327722053626e-07, | |
| "loss": 0.7437, | |
| "step": 512 | |
| }, | |
| { | |
| "epoch": 1.2099056603773586, | |
| "grad_norm": 0.7718047147502091, | |
| "learning_rate": 7.545145128592008e-07, | |
| "loss": 0.7507, | |
| "step": 513 | |
| }, | |
| { | |
| "epoch": 1.2122641509433962, | |
| "grad_norm": 0.9572262834139598, | |
| "learning_rate": 7.507000582025748e-07, | |
| "loss": 0.7329, | |
| "step": 514 | |
| }, | |
| { | |
| "epoch": 1.2146226415094339, | |
| "grad_norm": 0.8375942206336273, | |
| "learning_rate": 7.468894673543181e-07, | |
| "loss": 0.7393, | |
| "step": 515 | |
| }, | |
| { | |
| "epoch": 1.2169811320754718, | |
| "grad_norm": 0.8300278395620482, | |
| "learning_rate": 7.430827993733808e-07, | |
| "loss": 0.729, | |
| "step": 516 | |
| }, | |
| { | |
| "epoch": 1.2193396226415094, | |
| "grad_norm": 0.924932117248118, | |
| "learning_rate": 7.39280113257914e-07, | |
| "loss": 0.7418, | |
| "step": 517 | |
| }, | |
| { | |
| "epoch": 1.2216981132075473, | |
| "grad_norm": 0.8751602815649364, | |
| "learning_rate": 7.354814679443556e-07, | |
| "loss": 0.7695, | |
| "step": 518 | |
| }, | |
| { | |
| "epoch": 1.224056603773585, | |
| "grad_norm": 0.7891993502461758, | |
| "learning_rate": 7.316869223065155e-07, | |
| "loss": 0.7755, | |
| "step": 519 | |
| }, | |
| { | |
| "epoch": 1.2264150943396226, | |
| "grad_norm": 0.8088776711707174, | |
| "learning_rate": 7.278965351546648e-07, | |
| "loss": 0.7572, | |
| "step": 520 | |
| }, | |
| { | |
| "epoch": 1.2287735849056605, | |
| "grad_norm": 0.7734655717390242, | |
| "learning_rate": 7.241103652346248e-07, | |
| "loss": 0.731, | |
| "step": 521 | |
| }, | |
| { | |
| "epoch": 1.2311320754716981, | |
| "grad_norm": 0.8075042544518887, | |
| "learning_rate": 7.20328471226854e-07, | |
| "loss": 0.7681, | |
| "step": 522 | |
| }, | |
| { | |
| "epoch": 1.2334905660377358, | |
| "grad_norm": 0.8682085195199817, | |
| "learning_rate": 7.165509117455406e-07, | |
| "loss": 0.7869, | |
| "step": 523 | |
| }, | |
| { | |
| "epoch": 1.2358490566037736, | |
| "grad_norm": 0.7941356815577187, | |
| "learning_rate": 7.127777453376935e-07, | |
| "loss": 0.7839, | |
| "step": 524 | |
| }, | |
| { | |
| "epoch": 1.2382075471698113, | |
| "grad_norm": 0.797011639565925, | |
| "learning_rate": 7.090090304822355e-07, | |
| "loss": 0.7704, | |
| "step": 525 | |
| }, | |
| { | |
| "epoch": 1.240566037735849, | |
| "grad_norm": 0.778511764914176, | |
| "learning_rate": 7.052448255890957e-07, | |
| "loss": 0.7684, | |
| "step": 526 | |
| }, | |
| { | |
| "epoch": 1.2429245283018868, | |
| "grad_norm": 0.9154469456152399, | |
| "learning_rate": 7.014851889983057e-07, | |
| "loss": 0.747, | |
| "step": 527 | |
| }, | |
| { | |
| "epoch": 1.2452830188679245, | |
| "grad_norm": 0.7896606577750244, | |
| "learning_rate": 6.97730178979093e-07, | |
| "loss": 0.7243, | |
| "step": 528 | |
| }, | |
| { | |
| "epoch": 1.2476415094339623, | |
| "grad_norm": 0.8076356582882437, | |
| "learning_rate": 6.939798537289825e-07, | |
| "loss": 0.7879, | |
| "step": 529 | |
| }, | |
| { | |
| "epoch": 1.25, | |
| "grad_norm": 0.8038075591560918, | |
| "learning_rate": 6.902342713728886e-07, | |
| "loss": 0.7304, | |
| "step": 530 | |
| }, | |
| { | |
| "epoch": 1.25, | |
| "eval_loss": 0.6839449405670166, | |
| "eval_runtime": 82.7741, | |
| "eval_samples_per_second": 8.517, | |
| "eval_steps_per_second": 0.544, | |
| "step": 530 | |
| }, | |
| { | |
| "epoch": 1.2523584905660377, | |
| "grad_norm": 0.7543374055980275, | |
| "learning_rate": 6.864934899622191e-07, | |
| "loss": 0.7671, | |
| "step": 531 | |
| }, | |
| { | |
| "epoch": 1.2547169811320755, | |
| "grad_norm": 0.7940675058566147, | |
| "learning_rate": 6.827575674739725e-07, | |
| "loss": 0.7509, | |
| "step": 532 | |
| }, | |
| { | |
| "epoch": 1.2570754716981132, | |
| "grad_norm": 0.7898168085125467, | |
| "learning_rate": 6.790265618098423e-07, | |
| "loss": 0.7819, | |
| "step": 533 | |
| }, | |
| { | |
| "epoch": 1.259433962264151, | |
| "grad_norm": 0.8290907307209445, | |
| "learning_rate": 6.753005307953165e-07, | |
| "loss": 0.7433, | |
| "step": 534 | |
| }, | |
| { | |
| "epoch": 1.2617924528301887, | |
| "grad_norm": 0.8051377335366591, | |
| "learning_rate": 6.715795321787836e-07, | |
| "loss": 0.7583, | |
| "step": 535 | |
| }, | |
| { | |
| "epoch": 1.2641509433962264, | |
| "grad_norm": 0.7964463188550859, | |
| "learning_rate": 6.678636236306357e-07, | |
| "loss": 0.7733, | |
| "step": 536 | |
| }, | |
| { | |
| "epoch": 1.2665094339622642, | |
| "grad_norm": 1.0183118136986125, | |
| "learning_rate": 6.641528627423774e-07, | |
| "loss": 0.7648, | |
| "step": 537 | |
| }, | |
| { | |
| "epoch": 1.2688679245283019, | |
| "grad_norm": 0.7893378901295127, | |
| "learning_rate": 6.604473070257308e-07, | |
| "loss": 0.7543, | |
| "step": 538 | |
| }, | |
| { | |
| "epoch": 1.2712264150943398, | |
| "grad_norm": 0.962839563526039, | |
| "learning_rate": 6.567470139117447e-07, | |
| "loss": 0.7815, | |
| "step": 539 | |
| }, | |
| { | |
| "epoch": 1.2735849056603774, | |
| "grad_norm": 0.779909096149425, | |
| "learning_rate": 6.530520407499049e-07, | |
| "loss": 0.7321, | |
| "step": 540 | |
| }, | |
| { | |
| "epoch": 1.275943396226415, | |
| "grad_norm": 1.0157939390615558, | |
| "learning_rate": 6.493624448072457e-07, | |
| "loss": 0.7622, | |
| "step": 541 | |
| }, | |
| { | |
| "epoch": 1.2783018867924527, | |
| "grad_norm": 0.9839155144434717, | |
| "learning_rate": 6.456782832674613e-07, | |
| "loss": 0.7498, | |
| "step": 542 | |
| }, | |
| { | |
| "epoch": 1.2806603773584906, | |
| "grad_norm": 0.8287088174891809, | |
| "learning_rate": 6.419996132300203e-07, | |
| "loss": 0.7479, | |
| "step": 543 | |
| }, | |
| { | |
| "epoch": 1.2830188679245282, | |
| "grad_norm": 0.8401170082742024, | |
| "learning_rate": 6.383264917092801e-07, | |
| "loss": 0.7398, | |
| "step": 544 | |
| }, | |
| { | |
| "epoch": 1.2853773584905661, | |
| "grad_norm": 0.8649864952648277, | |
| "learning_rate": 6.34658975633605e-07, | |
| "loss": 0.7629, | |
| "step": 545 | |
| }, | |
| { | |
| "epoch": 1.2877358490566038, | |
| "grad_norm": 0.7978032312823685, | |
| "learning_rate": 6.30997121844481e-07, | |
| "loss": 0.7455, | |
| "step": 546 | |
| }, | |
| { | |
| "epoch": 1.2900943396226414, | |
| "grad_norm": 0.78443421199377, | |
| "learning_rate": 6.273409870956369e-07, | |
| "loss": 0.7512, | |
| "step": 547 | |
| }, | |
| { | |
| "epoch": 1.2924528301886793, | |
| "grad_norm": 0.9742229809355082, | |
| "learning_rate": 6.236906280521646e-07, | |
| "loss": 0.7629, | |
| "step": 548 | |
| }, | |
| { | |
| "epoch": 1.294811320754717, | |
| "grad_norm": 0.8605023916238441, | |
| "learning_rate": 6.200461012896401e-07, | |
| "loss": 0.7389, | |
| "step": 549 | |
| }, | |
| { | |
| "epoch": 1.2971698113207548, | |
| "grad_norm": 0.7976345004555616, | |
| "learning_rate": 6.164074632932472e-07, | |
| "loss": 0.7675, | |
| "step": 550 | |
| }, | |
| { | |
| "epoch": 1.2995283018867925, | |
| "grad_norm": 0.8766199870243025, | |
| "learning_rate": 6.127747704569015e-07, | |
| "loss": 0.7392, | |
| "step": 551 | |
| }, | |
| { | |
| "epoch": 1.3018867924528301, | |
| "grad_norm": 0.8388074881151663, | |
| "learning_rate": 6.091480790823771e-07, | |
| "loss": 0.7594, | |
| "step": 552 | |
| }, | |
| { | |
| "epoch": 1.304245283018868, | |
| "grad_norm": 0.7816816297148448, | |
| "learning_rate": 6.055274453784338e-07, | |
| "loss": 0.7754, | |
| "step": 553 | |
| }, | |
| { | |
| "epoch": 1.3066037735849056, | |
| "grad_norm": 0.8051939468008078, | |
| "learning_rate": 6.019129254599456e-07, | |
| "loss": 0.7456, | |
| "step": 554 | |
| }, | |
| { | |
| "epoch": 1.3089622641509435, | |
| "grad_norm": 0.7954437915413475, | |
| "learning_rate": 5.983045753470307e-07, | |
| "loss": 0.7804, | |
| "step": 555 | |
| }, | |
| { | |
| "epoch": 1.3113207547169812, | |
| "grad_norm": 0.7995393304495568, | |
| "learning_rate": 5.947024509641841e-07, | |
| "loss": 0.7346, | |
| "step": 556 | |
| }, | |
| { | |
| "epoch": 1.3136792452830188, | |
| "grad_norm": 0.7666328856774342, | |
| "learning_rate": 5.911066081394112e-07, | |
| "loss": 0.7641, | |
| "step": 557 | |
| }, | |
| { | |
| "epoch": 1.3160377358490565, | |
| "grad_norm": 0.9135245763682434, | |
| "learning_rate": 5.875171026033608e-07, | |
| "loss": 0.7493, | |
| "step": 558 | |
| }, | |
| { | |
| "epoch": 1.3183962264150944, | |
| "grad_norm": 0.7843209916172604, | |
| "learning_rate": 5.839339899884628e-07, | |
| "loss": 0.754, | |
| "step": 559 | |
| }, | |
| { | |
| "epoch": 1.320754716981132, | |
| "grad_norm": 0.7798421629061796, | |
| "learning_rate": 5.803573258280654e-07, | |
| "loss": 0.7453, | |
| "step": 560 | |
| }, | |
| { | |
| "epoch": 1.3231132075471699, | |
| "grad_norm": 0.8026516589712169, | |
| "learning_rate": 5.76787165555575e-07, | |
| "loss": 0.7791, | |
| "step": 561 | |
| }, | |
| { | |
| "epoch": 1.3254716981132075, | |
| "grad_norm": 0.8161986502002226, | |
| "learning_rate": 5.732235645035963e-07, | |
| "loss": 0.7435, | |
| "step": 562 | |
| }, | |
| { | |
| "epoch": 1.3278301886792452, | |
| "grad_norm": 0.7982394811556922, | |
| "learning_rate": 5.696665779030749e-07, | |
| "loss": 0.7312, | |
| "step": 563 | |
| }, | |
| { | |
| "epoch": 1.330188679245283, | |
| "grad_norm": 0.8424060129449961, | |
| "learning_rate": 5.661162608824419e-07, | |
| "loss": 0.7209, | |
| "step": 564 | |
| }, | |
| { | |
| "epoch": 1.3325471698113207, | |
| "grad_norm": 0.8393522976074216, | |
| "learning_rate": 5.625726684667585e-07, | |
| "loss": 0.7497, | |
| "step": 565 | |
| }, | |
| { | |
| "epoch": 1.3349056603773586, | |
| "grad_norm": 0.8458006058371306, | |
| "learning_rate": 5.590358555768642e-07, | |
| "loss": 0.7415, | |
| "step": 566 | |
| }, | |
| { | |
| "epoch": 1.3372641509433962, | |
| "grad_norm": 0.7731963506173513, | |
| "learning_rate": 5.555058770285246e-07, | |
| "loss": 0.7289, | |
| "step": 567 | |
| }, | |
| { | |
| "epoch": 1.3396226415094339, | |
| "grad_norm": 0.919340196876583, | |
| "learning_rate": 5.519827875315823e-07, | |
| "loss": 0.7372, | |
| "step": 568 | |
| }, | |
| { | |
| "epoch": 1.3419811320754718, | |
| "grad_norm": 0.8047418954501285, | |
| "learning_rate": 5.484666416891108e-07, | |
| "loss": 0.7854, | |
| "step": 569 | |
| }, | |
| { | |
| "epoch": 1.3443396226415094, | |
| "grad_norm": 0.8420862799710107, | |
| "learning_rate": 5.449574939965636e-07, | |
| "loss": 0.7487, | |
| "step": 570 | |
| }, | |
| { | |
| "epoch": 1.3466981132075473, | |
| "grad_norm": 0.862354237686273, | |
| "learning_rate": 5.414553988409343e-07, | |
| "loss": 0.7615, | |
| "step": 571 | |
| }, | |
| { | |
| "epoch": 1.349056603773585, | |
| "grad_norm": 0.764673226355618, | |
| "learning_rate": 5.379604104999109e-07, | |
| "loss": 0.7402, | |
| "step": 572 | |
| }, | |
| { | |
| "epoch": 1.3514150943396226, | |
| "grad_norm": 0.9229852280599696, | |
| "learning_rate": 5.344725831410368e-07, | |
| "loss": 0.746, | |
| "step": 573 | |
| }, | |
| { | |
| "epoch": 1.3537735849056602, | |
| "grad_norm": 0.7749939321603743, | |
| "learning_rate": 5.309919708208686e-07, | |
| "loss": 0.7393, | |
| "step": 574 | |
| }, | |
| { | |
| "epoch": 1.3561320754716981, | |
| "grad_norm": 1.0855469699332105, | |
| "learning_rate": 5.275186274841404e-07, | |
| "loss": 0.7292, | |
| "step": 575 | |
| }, | |
| { | |
| "epoch": 1.3584905660377358, | |
| "grad_norm": 1.3079826302270743, | |
| "learning_rate": 5.240526069629264e-07, | |
| "loss": 0.7551, | |
| "step": 576 | |
| }, | |
| { | |
| "epoch": 1.3608490566037736, | |
| "grad_norm": 0.8097304218867772, | |
| "learning_rate": 5.205939629758078e-07, | |
| "loss": 0.7554, | |
| "step": 577 | |
| }, | |
| { | |
| "epoch": 1.3632075471698113, | |
| "grad_norm": 0.7629186235386943, | |
| "learning_rate": 5.171427491270393e-07, | |
| "loss": 0.7448, | |
| "step": 578 | |
| }, | |
| { | |
| "epoch": 1.365566037735849, | |
| "grad_norm": 0.8231834244100196, | |
| "learning_rate": 5.136990189057187e-07, | |
| "loss": 0.7675, | |
| "step": 579 | |
| }, | |
| { | |
| "epoch": 1.3679245283018868, | |
| "grad_norm": 0.7758543745731015, | |
| "learning_rate": 5.102628256849575e-07, | |
| "loss": 0.7643, | |
| "step": 580 | |
| }, | |
| { | |
| "epoch": 1.3702830188679245, | |
| "grad_norm": 0.7915005392947032, | |
| "learning_rate": 5.068342227210545e-07, | |
| "loss": 0.732, | |
| "step": 581 | |
| }, | |
| { | |
| "epoch": 1.3726415094339623, | |
| "grad_norm": 0.821233361833331, | |
| "learning_rate": 5.034132631526695e-07, | |
| "loss": 0.7145, | |
| "step": 582 | |
| }, | |
| { | |
| "epoch": 1.375, | |
| "grad_norm": 0.8288831867947671, | |
| "learning_rate": 5.000000000000002e-07, | |
| "loss": 0.7717, | |
| "step": 583 | |
| }, | |
| { | |
| "epoch": 1.3773584905660377, | |
| "grad_norm": 0.9380885957065556, | |
| "learning_rate": 4.9659448616396e-07, | |
| "loss": 0.7655, | |
| "step": 584 | |
| }, | |
| { | |
| "epoch": 1.3797169811320755, | |
| "grad_norm": 0.769393956851887, | |
| "learning_rate": 4.9319677442536e-07, | |
| "loss": 0.7481, | |
| "step": 585 | |
| }, | |
| { | |
| "epoch": 1.3820754716981132, | |
| "grad_norm": 0.7624074760874653, | |
| "learning_rate": 4.89806917444088e-07, | |
| "loss": 0.7408, | |
| "step": 586 | |
| }, | |
| { | |
| "epoch": 1.384433962264151, | |
| "grad_norm": 0.8836661905432738, | |
| "learning_rate": 4.864249677582935e-07, | |
| "loss": 0.7423, | |
| "step": 587 | |
| }, | |
| { | |
| "epoch": 1.3867924528301887, | |
| "grad_norm": 1.7675171950723623, | |
| "learning_rate": 4.830509777835744e-07, | |
| "loss": 0.7414, | |
| "step": 588 | |
| }, | |
| { | |
| "epoch": 1.3891509433962264, | |
| "grad_norm": 0.8297208161937744, | |
| "learning_rate": 4.796849998121647e-07, | |
| "loss": 0.7571, | |
| "step": 589 | |
| }, | |
| { | |
| "epoch": 1.3915094339622642, | |
| "grad_norm": 0.8105952844684988, | |
| "learning_rate": 4.7632708601212215e-07, | |
| "loss": 0.7473, | |
| "step": 590 | |
| }, | |
| { | |
| "epoch": 1.3938679245283019, | |
| "grad_norm": 0.943401465741068, | |
| "learning_rate": 4.7297728842652116e-07, | |
| "loss": 0.7885, | |
| "step": 591 | |
| }, | |
| { | |
| "epoch": 1.3962264150943398, | |
| "grad_norm": 0.8407405967900344, | |
| "learning_rate": 4.6963565897264623e-07, | |
| "loss": 0.7768, | |
| "step": 592 | |
| }, | |
| { | |
| "epoch": 1.3985849056603774, | |
| "grad_norm": 0.7742620223687944, | |
| "learning_rate": 4.663022494411866e-07, | |
| "loss": 0.7679, | |
| "step": 593 | |
| }, | |
| { | |
| "epoch": 1.400943396226415, | |
| "grad_norm": 0.7984390061772534, | |
| "learning_rate": 4.6297711149543405e-07, | |
| "loss": 0.7503, | |
| "step": 594 | |
| }, | |
| { | |
| "epoch": 1.4033018867924527, | |
| "grad_norm": 0.7687242314529935, | |
| "learning_rate": 4.596602966704822e-07, | |
| "loss": 0.7593, | |
| "step": 595 | |
| }, | |
| { | |
| "epoch": 1.4056603773584906, | |
| "grad_norm": 0.7752552260078952, | |
| "learning_rate": 4.5635185637242735e-07, | |
| "loss": 0.7236, | |
| "step": 596 | |
| }, | |
| { | |
| "epoch": 1.4080188679245282, | |
| "grad_norm": 0.7909608031012165, | |
| "learning_rate": 4.530518418775733e-07, | |
| "loss": 0.7487, | |
| "step": 597 | |
| }, | |
| { | |
| "epoch": 1.4103773584905661, | |
| "grad_norm": 1.4293658648035819, | |
| "learning_rate": 4.4976030433163337e-07, | |
| "loss": 0.7619, | |
| "step": 598 | |
| }, | |
| { | |
| "epoch": 1.4127358490566038, | |
| "grad_norm": 0.9601629598020603, | |
| "learning_rate": 4.4647729474894123e-07, | |
| "loss": 0.7301, | |
| "step": 599 | |
| }, | |
| { | |
| "epoch": 1.4150943396226414, | |
| "grad_norm": 0.8524971604621474, | |
| "learning_rate": 4.432028640116581e-07, | |
| "loss": 0.7743, | |
| "step": 600 | |
| }, | |
| { | |
| "epoch": 1.4174528301886793, | |
| "grad_norm": 0.7955465630229851, | |
| "learning_rate": 4.399370628689857e-07, | |
| "loss": 0.7549, | |
| "step": 601 | |
| }, | |
| { | |
| "epoch": 1.419811320754717, | |
| "grad_norm": 0.8461044127574078, | |
| "learning_rate": 4.366799419363779e-07, | |
| "loss": 0.7923, | |
| "step": 602 | |
| }, | |
| { | |
| "epoch": 1.4221698113207548, | |
| "grad_norm": 0.7959852549975004, | |
| "learning_rate": 4.3343155169475797e-07, | |
| "loss": 0.7313, | |
| "step": 603 | |
| }, | |
| { | |
| "epoch": 1.4245283018867925, | |
| "grad_norm": 0.8115147827054038, | |
| "learning_rate": 4.3019194248973377e-07, | |
| "loss": 0.7353, | |
| "step": 604 | |
| }, | |
| { | |
| "epoch": 1.4268867924528301, | |
| "grad_norm": 0.7884615566733161, | |
| "learning_rate": 4.269611645308214e-07, | |
| "loss": 0.7396, | |
| "step": 605 | |
| }, | |
| { | |
| "epoch": 1.429245283018868, | |
| "grad_norm": 0.8709874327087314, | |
| "learning_rate": 4.237392678906633e-07, | |
| "loss": 0.7557, | |
| "step": 606 | |
| }, | |
| { | |
| "epoch": 1.4316037735849056, | |
| "grad_norm": 0.9486373793118293, | |
| "learning_rate": 4.205263025042538e-07, | |
| "loss": 0.7397, | |
| "step": 607 | |
| }, | |
| { | |
| "epoch": 1.4339622641509435, | |
| "grad_norm": 0.8162741407892988, | |
| "learning_rate": 4.173223181681651e-07, | |
| "loss": 0.7327, | |
| "step": 608 | |
| }, | |
| { | |
| "epoch": 1.4363207547169812, | |
| "grad_norm": 0.8564525506081095, | |
| "learning_rate": 4.141273645397754e-07, | |
| "loss": 0.7745, | |
| "step": 609 | |
| }, | |
| { | |
| "epoch": 1.4386792452830188, | |
| "grad_norm": 0.8291423725092533, | |
| "learning_rate": 4.109414911364992e-07, | |
| "loss": 0.761, | |
| "step": 610 | |
| }, | |
| { | |
| "epoch": 1.4410377358490565, | |
| "grad_norm": 0.9110035252316134, | |
| "learning_rate": 4.0776474733502007e-07, | |
| "loss": 0.7227, | |
| "step": 611 | |
| }, | |
| { | |
| "epoch": 1.4433962264150944, | |
| "grad_norm": 7.248992388941307, | |
| "learning_rate": 4.045971823705249e-07, | |
| "loss": 0.7515, | |
| "step": 612 | |
| }, | |
| { | |
| "epoch": 1.445754716981132, | |
| "grad_norm": 0.737128592265643, | |
| "learning_rate": 4.0143884533594197e-07, | |
| "loss": 0.7308, | |
| "step": 613 | |
| }, | |
| { | |
| "epoch": 1.4481132075471699, | |
| "grad_norm": 0.7878766896489899, | |
| "learning_rate": 3.982897851811786e-07, | |
| "loss": 0.7323, | |
| "step": 614 | |
| }, | |
| { | |
| "epoch": 1.4504716981132075, | |
| "grad_norm": 0.7859241609615767, | |
| "learning_rate": 3.951500507123627e-07, | |
| "loss": 0.7814, | |
| "step": 615 | |
| }, | |
| { | |
| "epoch": 1.4528301886792452, | |
| "grad_norm": 0.8312739174249127, | |
| "learning_rate": 3.920196905910873e-07, | |
| "loss": 0.7446, | |
| "step": 616 | |
| }, | |
| { | |
| "epoch": 1.455188679245283, | |
| "grad_norm": 0.8160254075021761, | |
| "learning_rate": 3.888987533336566e-07, | |
| "loss": 0.745, | |
| "step": 617 | |
| }, | |
| { | |
| "epoch": 1.4575471698113207, | |
| "grad_norm": 0.9271092228733877, | |
| "learning_rate": 3.8578728731033214e-07, | |
| "loss": 0.7875, | |
| "step": 618 | |
| }, | |
| { | |
| "epoch": 1.4599056603773586, | |
| "grad_norm": 1.3059448042170938, | |
| "learning_rate": 3.826853407445848e-07, | |
| "loss": 0.7758, | |
| "step": 619 | |
| }, | |
| { | |
| "epoch": 1.4622641509433962, | |
| "grad_norm": 0.7940556961073607, | |
| "learning_rate": 3.795929617123468e-07, | |
| "loss": 0.7289, | |
| "step": 620 | |
| }, | |
| { | |
| "epoch": 1.4646226415094339, | |
| "grad_norm": 0.7908440739021586, | |
| "learning_rate": 3.765101981412665e-07, | |
| "loss": 0.7247, | |
| "step": 621 | |
| }, | |
| { | |
| "epoch": 1.4669811320754718, | |
| "grad_norm": 0.807553140785391, | |
| "learning_rate": 3.7343709780996614e-07, | |
| "loss": 0.7365, | |
| "step": 622 | |
| }, | |
| { | |
| "epoch": 1.4693396226415094, | |
| "grad_norm": 2.5050420828496787, | |
| "learning_rate": 3.703737083473005e-07, | |
| "loss": 0.7729, | |
| "step": 623 | |
| }, | |
| { | |
| "epoch": 1.4716981132075473, | |
| "grad_norm": 0.8163112108481482, | |
| "learning_rate": 3.673200772316193e-07, | |
| "loss": 0.7269, | |
| "step": 624 | |
| }, | |
| { | |
| "epoch": 1.474056603773585, | |
| "grad_norm": 0.8451238760005333, | |
| "learning_rate": 3.6427625179003217e-07, | |
| "loss": 0.7435, | |
| "step": 625 | |
| }, | |
| { | |
| "epoch": 1.4764150943396226, | |
| "grad_norm": 1.0290141575096419, | |
| "learning_rate": 3.6124227919767227e-07, | |
| "loss": 0.7488, | |
| "step": 626 | |
| }, | |
| { | |
| "epoch": 1.4787735849056602, | |
| "grad_norm": 0.7955100632691958, | |
| "learning_rate": 3.5821820647696864e-07, | |
| "loss": 0.7556, | |
| "step": 627 | |
| }, | |
| { | |
| "epoch": 1.4811320754716981, | |
| "grad_norm": 0.8238206028160229, | |
| "learning_rate": 3.552040804969149e-07, | |
| "loss": 0.7541, | |
| "step": 628 | |
| }, | |
| { | |
| "epoch": 1.4834905660377358, | |
| "grad_norm": 0.8519020092733177, | |
| "learning_rate": 3.5219994797234476e-07, | |
| "loss": 0.7783, | |
| "step": 629 | |
| }, | |
| { | |
| "epoch": 1.4858490566037736, | |
| "grad_norm": 0.7845182991866497, | |
| "learning_rate": 3.4920585546320625e-07, | |
| "loss": 0.7457, | |
| "step": 630 | |
| }, | |
| { | |
| "epoch": 1.4882075471698113, | |
| "grad_norm": 0.7643446889279868, | |
| "learning_rate": 3.4622184937384156e-07, | |
| "loss": 0.7637, | |
| "step": 631 | |
| }, | |
| { | |
| "epoch": 1.490566037735849, | |
| "grad_norm": 0.8986522134788724, | |
| "learning_rate": 3.4324797595226564e-07, | |
| "loss": 0.7431, | |
| "step": 632 | |
| }, | |
| { | |
| "epoch": 1.4929245283018868, | |
| "grad_norm": 0.788015002889114, | |
| "learning_rate": 3.4028428128945286e-07, | |
| "loss": 0.746, | |
| "step": 633 | |
| }, | |
| { | |
| "epoch": 1.4952830188679245, | |
| "grad_norm": 0.7754842750109397, | |
| "learning_rate": 3.3733081131861975e-07, | |
| "loss": 0.7688, | |
| "step": 634 | |
| }, | |
| { | |
| "epoch": 1.4976415094339623, | |
| "grad_norm": 0.8009965193531273, | |
| "learning_rate": 3.343876118145141e-07, | |
| "loss": 0.7501, | |
| "step": 635 | |
| }, | |
| { | |
| "epoch": 1.5, | |
| "grad_norm": 1.006640510210976, | |
| "learning_rate": 3.314547283927057e-07, | |
| "loss": 0.7239, | |
| "step": 636 | |
| }, | |
| { | |
| "epoch": 1.5, | |
| "eval_loss": 0.6808757185935974, | |
| "eval_runtime": 83.2321, | |
| "eval_samples_per_second": 8.47, | |
| "eval_steps_per_second": 0.541, | |
| "step": 636 | |
| }, | |
| { | |
| "epoch": 1.5023584905660377, | |
| "grad_norm": 0.8598327085719165, | |
| "learning_rate": 3.2853220650887913e-07, | |
| "loss": 0.7149, | |
| "step": 637 | |
| }, | |
| { | |
| "epoch": 1.5047169811320755, | |
| "grad_norm": 0.8482092774985639, | |
| "learning_rate": 3.256200914581292e-07, | |
| "loss": 0.7556, | |
| "step": 638 | |
| }, | |
| { | |
| "epoch": 1.5070754716981132, | |
| "grad_norm": 0.7837630460537407, | |
| "learning_rate": 3.227184283742591e-07, | |
| "loss": 0.7432, | |
| "step": 639 | |
| }, | |
| { | |
| "epoch": 1.509433962264151, | |
| "grad_norm": 0.8392002898875265, | |
| "learning_rate": 3.198272622290804e-07, | |
| "loss": 0.7395, | |
| "step": 640 | |
| }, | |
| { | |
| "epoch": 1.5117924528301887, | |
| "grad_norm": 0.826051018451035, | |
| "learning_rate": 3.169466378317177e-07, | |
| "loss": 0.7849, | |
| "step": 641 | |
| }, | |
| { | |
| "epoch": 1.5141509433962264, | |
| "grad_norm": 1.0059361369860251, | |
| "learning_rate": 3.1407659982791204e-07, | |
| "loss": 0.7541, | |
| "step": 642 | |
| }, | |
| { | |
| "epoch": 1.516509433962264, | |
| "grad_norm": 0.8092490329577119, | |
| "learning_rate": 3.112171926993291e-07, | |
| "loss": 0.7348, | |
| "step": 643 | |
| }, | |
| { | |
| "epoch": 1.5188679245283019, | |
| "grad_norm": 0.7280559308125196, | |
| "learning_rate": 3.0836846076287146e-07, | |
| "loss": 0.6873, | |
| "step": 644 | |
| }, | |
| { | |
| "epoch": 1.5212264150943398, | |
| "grad_norm": 0.8517206567349668, | |
| "learning_rate": 3.055304481699913e-07, | |
| "loss": 0.7839, | |
| "step": 645 | |
| }, | |
| { | |
| "epoch": 1.5235849056603774, | |
| "grad_norm": 0.8340097914857253, | |
| "learning_rate": 3.027031989060046e-07, | |
| "loss": 0.7561, | |
| "step": 646 | |
| }, | |
| { | |
| "epoch": 1.525943396226415, | |
| "grad_norm": 0.8002849424485019, | |
| "learning_rate": 2.998867567894108e-07, | |
| "loss": 0.7747, | |
| "step": 647 | |
| }, | |
| { | |
| "epoch": 1.5283018867924527, | |
| "grad_norm": 0.8623306309909063, | |
| "learning_rate": 2.970811654712133e-07, | |
| "loss": 0.7337, | |
| "step": 648 | |
| }, | |
| { | |
| "epoch": 1.5306603773584906, | |
| "grad_norm": 0.8335419959901207, | |
| "learning_rate": 2.942864684342432e-07, | |
| "loss": 0.741, | |
| "step": 649 | |
| }, | |
| { | |
| "epoch": 1.5330188679245285, | |
| "grad_norm": 0.8731062418731657, | |
| "learning_rate": 2.91502708992485e-07, | |
| "loss": 0.7424, | |
| "step": 650 | |
| }, | |
| { | |
| "epoch": 1.5353773584905661, | |
| "grad_norm": 0.819811746630834, | |
| "learning_rate": 2.8872993029040506e-07, | |
| "loss": 0.7355, | |
| "step": 651 | |
| }, | |
| { | |
| "epoch": 1.5377358490566038, | |
| "grad_norm": 0.8074813557073929, | |
| "learning_rate": 2.859681753022838e-07, | |
| "loss": 0.7594, | |
| "step": 652 | |
| }, | |
| { | |
| "epoch": 1.5400943396226414, | |
| "grad_norm": 0.820619090005774, | |
| "learning_rate": 2.8321748683154887e-07, | |
| "loss": 0.7443, | |
| "step": 653 | |
| }, | |
| { | |
| "epoch": 1.5424528301886793, | |
| "grad_norm": 0.8554565499942881, | |
| "learning_rate": 2.8047790751011216e-07, | |
| "loss": 0.6991, | |
| "step": 654 | |
| }, | |
| { | |
| "epoch": 1.544811320754717, | |
| "grad_norm": 0.7784690870870672, | |
| "learning_rate": 2.777494797977088e-07, | |
| "loss": 0.7328, | |
| "step": 655 | |
| }, | |
| { | |
| "epoch": 1.5471698113207548, | |
| "grad_norm": 1.7923607795927756, | |
| "learning_rate": 2.7503224598123895e-07, | |
| "loss": 0.7266, | |
| "step": 656 | |
| }, | |
| { | |
| "epoch": 1.5495283018867925, | |
| "grad_norm": 0.7677443512564135, | |
| "learning_rate": 2.7232624817411376e-07, | |
| "loss": 0.737, | |
| "step": 657 | |
| }, | |
| { | |
| "epoch": 1.5518867924528301, | |
| "grad_norm": 0.8055758349794541, | |
| "learning_rate": 2.6963152831560066e-07, | |
| "loss": 0.7444, | |
| "step": 658 | |
| }, | |
| { | |
| "epoch": 1.5542452830188678, | |
| "grad_norm": 0.9008016637756693, | |
| "learning_rate": 2.6694812817017387e-07, | |
| "loss": 0.7128, | |
| "step": 659 | |
| }, | |
| { | |
| "epoch": 1.5566037735849056, | |
| "grad_norm": 0.8106690468627914, | |
| "learning_rate": 2.642760893268684e-07, | |
| "loss": 0.7457, | |
| "step": 660 | |
| }, | |
| { | |
| "epoch": 1.5589622641509435, | |
| "grad_norm": 1.027228042593183, | |
| "learning_rate": 2.616154531986345e-07, | |
| "loss": 0.7251, | |
| "step": 661 | |
| }, | |
| { | |
| "epoch": 1.5613207547169812, | |
| "grad_norm": 0.7957920778904417, | |
| "learning_rate": 2.5896626102169594e-07, | |
| "loss": 0.7481, | |
| "step": 662 | |
| }, | |
| { | |
| "epoch": 1.5636792452830188, | |
| "grad_norm": 0.7514802697133819, | |
| "learning_rate": 2.5632855385491037e-07, | |
| "loss": 0.757, | |
| "step": 663 | |
| }, | |
| { | |
| "epoch": 1.5660377358490565, | |
| "grad_norm": 0.8814866050056973, | |
| "learning_rate": 2.53702372579134e-07, | |
| "loss": 0.7361, | |
| "step": 664 | |
| }, | |
| { | |
| "epoch": 1.5683962264150944, | |
| "grad_norm": 0.8346312832230348, | |
| "learning_rate": 2.51087757896587e-07, | |
| "loss": 0.7378, | |
| "step": 665 | |
| }, | |
| { | |
| "epoch": 1.5707547169811322, | |
| "grad_norm": 0.8164772219257521, | |
| "learning_rate": 2.4848475033022377e-07, | |
| "loss": 0.7631, | |
| "step": 666 | |
| }, | |
| { | |
| "epoch": 1.5731132075471699, | |
| "grad_norm": 0.7933267558316177, | |
| "learning_rate": 2.458933902231038e-07, | |
| "loss": 0.7288, | |
| "step": 667 | |
| }, | |
| { | |
| "epoch": 1.5754716981132075, | |
| "grad_norm": 0.8420080383383726, | |
| "learning_rate": 2.4331371773776687e-07, | |
| "loss": 0.77, | |
| "step": 668 | |
| }, | |
| { | |
| "epoch": 1.5778301886792452, | |
| "grad_norm": 5.289720197423799, | |
| "learning_rate": 2.407457728556115e-07, | |
| "loss": 0.7184, | |
| "step": 669 | |
| }, | |
| { | |
| "epoch": 1.580188679245283, | |
| "grad_norm": 1.0842649090634284, | |
| "learning_rate": 2.3818959537627282e-07, | |
| "loss": 0.7383, | |
| "step": 670 | |
| }, | |
| { | |
| "epoch": 1.5825471698113207, | |
| "grad_norm": 0.8519744691939684, | |
| "learning_rate": 2.3564522491700833e-07, | |
| "loss": 0.769, | |
| "step": 671 | |
| }, | |
| { | |
| "epoch": 1.5849056603773586, | |
| "grad_norm": 0.8168668326966967, | |
| "learning_rate": 2.3311270091208256e-07, | |
| "loss": 0.7444, | |
| "step": 672 | |
| }, | |
| { | |
| "epoch": 1.5872641509433962, | |
| "grad_norm": 0.7514913975722562, | |
| "learning_rate": 2.3059206261215668e-07, | |
| "loss": 0.691, | |
| "step": 673 | |
| }, | |
| { | |
| "epoch": 1.5896226415094339, | |
| "grad_norm": 0.7519874181403705, | |
| "learning_rate": 2.2808334908367909e-07, | |
| "loss": 0.7425, | |
| "step": 674 | |
| }, | |
| { | |
| "epoch": 1.5919811320754715, | |
| "grad_norm": 0.8353526410754343, | |
| "learning_rate": 2.2558659920828095e-07, | |
| "loss": 0.7616, | |
| "step": 675 | |
| }, | |
| { | |
| "epoch": 1.5943396226415094, | |
| "grad_norm": 0.7738579301027763, | |
| "learning_rate": 2.2310185168217212e-07, | |
| "loss": 0.7756, | |
| "step": 676 | |
| }, | |
| { | |
| "epoch": 1.5966981132075473, | |
| "grad_norm": 0.813046680487735, | |
| "learning_rate": 2.206291450155441e-07, | |
| "loss": 0.7675, | |
| "step": 677 | |
| }, | |
| { | |
| "epoch": 1.599056603773585, | |
| "grad_norm": 0.7815916852088219, | |
| "learning_rate": 2.181685175319702e-07, | |
| "loss": 0.7459, | |
| "step": 678 | |
| }, | |
| { | |
| "epoch": 1.6014150943396226, | |
| "grad_norm": 0.7482293961455181, | |
| "learning_rate": 2.157200073678137e-07, | |
| "loss": 0.7237, | |
| "step": 679 | |
| }, | |
| { | |
| "epoch": 1.6037735849056602, | |
| "grad_norm": 0.9475759536537903, | |
| "learning_rate": 2.132836524716355e-07, | |
| "loss": 0.718, | |
| "step": 680 | |
| }, | |
| { | |
| "epoch": 1.6061320754716981, | |
| "grad_norm": 0.8731724054191227, | |
| "learning_rate": 2.1085949060360653e-07, | |
| "loss": 0.7344, | |
| "step": 681 | |
| }, | |
| { | |
| "epoch": 1.608490566037736, | |
| "grad_norm": 0.8124879077769539, | |
| "learning_rate": 2.0844755933492263e-07, | |
| "loss": 0.7515, | |
| "step": 682 | |
| }, | |
| { | |
| "epoch": 1.6108490566037736, | |
| "grad_norm": 0.7772794085957034, | |
| "learning_rate": 2.0604789604722205e-07, | |
| "loss": 0.7471, | |
| "step": 683 | |
| }, | |
| { | |
| "epoch": 1.6132075471698113, | |
| "grad_norm": 0.8144653370194169, | |
| "learning_rate": 2.0366053793200565e-07, | |
| "loss": 0.7724, | |
| "step": 684 | |
| }, | |
| { | |
| "epoch": 1.615566037735849, | |
| "grad_norm": 1.1024190577460233, | |
| "learning_rate": 2.0128552199006198e-07, | |
| "loss": 0.7389, | |
| "step": 685 | |
| }, | |
| { | |
| "epoch": 1.6179245283018868, | |
| "grad_norm": 0.7813510804481604, | |
| "learning_rate": 1.9892288503089205e-07, | |
| "loss": 0.7688, | |
| "step": 686 | |
| }, | |
| { | |
| "epoch": 1.6202830188679245, | |
| "grad_norm": 0.7649385235808642, | |
| "learning_rate": 1.9657266367213898e-07, | |
| "loss": 0.7279, | |
| "step": 687 | |
| }, | |
| { | |
| "epoch": 1.6226415094339623, | |
| "grad_norm": 0.8664030315613933, | |
| "learning_rate": 1.9423489433902184e-07, | |
| "loss": 0.7604, | |
| "step": 688 | |
| }, | |
| { | |
| "epoch": 1.625, | |
| "grad_norm": 0.8134285929715391, | |
| "learning_rate": 1.9190961326377053e-07, | |
| "loss": 0.7628, | |
| "step": 689 | |
| }, | |
| { | |
| "epoch": 1.6273584905660377, | |
| "grad_norm": 0.8126410627826297, | |
| "learning_rate": 1.8959685648506362e-07, | |
| "loss": 0.7479, | |
| "step": 690 | |
| }, | |
| { | |
| "epoch": 1.6297169811320755, | |
| "grad_norm": 0.8243887159653667, | |
| "learning_rate": 1.8729665984747e-07, | |
| "loss": 0.743, | |
| "step": 691 | |
| }, | |
| { | |
| "epoch": 1.6320754716981132, | |
| "grad_norm": 0.8107223249523531, | |
| "learning_rate": 1.8500905900089403e-07, | |
| "loss": 0.7601, | |
| "step": 692 | |
| }, | |
| { | |
| "epoch": 1.634433962264151, | |
| "grad_norm": 0.8038068007057135, | |
| "learning_rate": 1.82734089400022e-07, | |
| "loss": 0.7643, | |
| "step": 693 | |
| }, | |
| { | |
| "epoch": 1.6367924528301887, | |
| "grad_norm": 0.9712605139438796, | |
| "learning_rate": 1.804717863037737e-07, | |
| "loss": 0.7445, | |
| "step": 694 | |
| }, | |
| { | |
| "epoch": 1.6391509433962264, | |
| "grad_norm": 0.7879085057169681, | |
| "learning_rate": 1.7822218477475494e-07, | |
| "loss": 0.7485, | |
| "step": 695 | |
| }, | |
| { | |
| "epoch": 1.641509433962264, | |
| "grad_norm": 0.8550428267297568, | |
| "learning_rate": 1.7598531967871465e-07, | |
| "loss": 0.7783, | |
| "step": 696 | |
| }, | |
| { | |
| "epoch": 1.6438679245283019, | |
| "grad_norm": 0.7795732830836477, | |
| "learning_rate": 1.737612256840053e-07, | |
| "loss": 0.7484, | |
| "step": 697 | |
| }, | |
| { | |
| "epoch": 1.6462264150943398, | |
| "grad_norm": 0.8133414594705825, | |
| "learning_rate": 1.7154993726104328e-07, | |
| "loss": 0.7751, | |
| "step": 698 | |
| }, | |
| { | |
| "epoch": 1.6485849056603774, | |
| "grad_norm": 0.7994545997274072, | |
| "learning_rate": 1.6935148868177718e-07, | |
| "loss": 0.7501, | |
| "step": 699 | |
| }, | |
| { | |
| "epoch": 1.650943396226415, | |
| "grad_norm": 0.7496147135106944, | |
| "learning_rate": 1.6716591401915502e-07, | |
| "loss": 0.759, | |
| "step": 700 | |
| }, | |
| { | |
| "epoch": 1.6533018867924527, | |
| "grad_norm": 0.7274661448787512, | |
| "learning_rate": 1.6499324714659758e-07, | |
| "loss": 0.735, | |
| "step": 701 | |
| }, | |
| { | |
| "epoch": 1.6556603773584906, | |
| "grad_norm": 0.8892069878489233, | |
| "learning_rate": 1.6283352173747146e-07, | |
| "loss": 0.7694, | |
| "step": 702 | |
| }, | |
| { | |
| "epoch": 1.6580188679245285, | |
| "grad_norm": 0.7851702666302485, | |
| "learning_rate": 1.6068677126456897e-07, | |
| "loss": 0.7373, | |
| "step": 703 | |
| }, | |
| { | |
| "epoch": 1.6603773584905661, | |
| "grad_norm": 0.7771975549062173, | |
| "learning_rate": 1.585530289995878e-07, | |
| "loss": 0.7101, | |
| "step": 704 | |
| }, | |
| { | |
| "epoch": 1.6627358490566038, | |
| "grad_norm": 0.8852742002247123, | |
| "learning_rate": 1.564323280126173e-07, | |
| "loss": 0.7525, | |
| "step": 705 | |
| }, | |
| { | |
| "epoch": 1.6650943396226414, | |
| "grad_norm": 1.0210362257937011, | |
| "learning_rate": 1.5432470117162433e-07, | |
| "loss": 0.7752, | |
| "step": 706 | |
| }, | |
| { | |
| "epoch": 1.6674528301886793, | |
| "grad_norm": 0.8077571397037019, | |
| "learning_rate": 1.522301811419442e-07, | |
| "loss": 0.7395, | |
| "step": 707 | |
| }, | |
| { | |
| "epoch": 1.669811320754717, | |
| "grad_norm": 0.8331764729151993, | |
| "learning_rate": 1.5014880038577482e-07, | |
| "loss": 0.7416, | |
| "step": 708 | |
| }, | |
| { | |
| "epoch": 1.6721698113207548, | |
| "grad_norm": 2.1774846943729935, | |
| "learning_rate": 1.4808059116167303e-07, | |
| "loss": 0.7492, | |
| "step": 709 | |
| }, | |
| { | |
| "epoch": 1.6745283018867925, | |
| "grad_norm": 0.8939523687224223, | |
| "learning_rate": 1.460255855240552e-07, | |
| "loss": 0.7358, | |
| "step": 710 | |
| }, | |
| { | |
| "epoch": 1.6768867924528301, | |
| "grad_norm": 0.7563678829167328, | |
| "learning_rate": 1.4398381532269998e-07, | |
| "loss": 0.7593, | |
| "step": 711 | |
| }, | |
| { | |
| "epoch": 1.6792452830188678, | |
| "grad_norm": 0.7668313174457733, | |
| "learning_rate": 1.4195531220225487e-07, | |
| "loss": 0.7275, | |
| "step": 712 | |
| }, | |
| { | |
| "epoch": 1.6816037735849056, | |
| "grad_norm": 0.7791231762469476, | |
| "learning_rate": 1.39940107601746e-07, | |
| "loss": 0.7692, | |
| "step": 713 | |
| }, | |
| { | |
| "epoch": 1.6839622641509435, | |
| "grad_norm": 1.014530101254706, | |
| "learning_rate": 1.3793823275409066e-07, | |
| "loss": 0.7475, | |
| "step": 714 | |
| }, | |
| { | |
| "epoch": 1.6863207547169812, | |
| "grad_norm": 0.754925190142359, | |
| "learning_rate": 1.3594971868561232e-07, | |
| "loss": 0.7248, | |
| "step": 715 | |
| }, | |
| { | |
| "epoch": 1.6886792452830188, | |
| "grad_norm": 0.7608021380299098, | |
| "learning_rate": 1.3397459621556128e-07, | |
| "loss": 0.7558, | |
| "step": 716 | |
| }, | |
| { | |
| "epoch": 1.6910377358490565, | |
| "grad_norm": 1.320307190976547, | |
| "learning_rate": 1.320128959556369e-07, | |
| "loss": 0.7393, | |
| "step": 717 | |
| }, | |
| { | |
| "epoch": 1.6933962264150944, | |
| "grad_norm": 0.8339214047495644, | |
| "learning_rate": 1.300646483095118e-07, | |
| "loss": 0.7597, | |
| "step": 718 | |
| }, | |
| { | |
| "epoch": 1.6957547169811322, | |
| "grad_norm": 0.8826119280765871, | |
| "learning_rate": 1.2812988347236166e-07, | |
| "loss": 0.7416, | |
| "step": 719 | |
| }, | |
| { | |
| "epoch": 1.6981132075471699, | |
| "grad_norm": 0.8928251184591149, | |
| "learning_rate": 1.262086314303973e-07, | |
| "loss": 0.7318, | |
| "step": 720 | |
| }, | |
| { | |
| "epoch": 1.7004716981132075, | |
| "grad_norm": 0.7951715947182522, | |
| "learning_rate": 1.243009219603993e-07, | |
| "loss": 0.7482, | |
| "step": 721 | |
| }, | |
| { | |
| "epoch": 1.7028301886792452, | |
| "grad_norm": 0.8741704408992066, | |
| "learning_rate": 1.2240678462925723e-07, | |
| "loss": 0.7287, | |
| "step": 722 | |
| }, | |
| { | |
| "epoch": 1.705188679245283, | |
| "grad_norm": 0.7786335970762307, | |
| "learning_rate": 1.2052624879351103e-07, | |
| "loss": 0.719, | |
| "step": 723 | |
| }, | |
| { | |
| "epoch": 1.7075471698113207, | |
| "grad_norm": 0.9985291709831805, | |
| "learning_rate": 1.1865934359889573e-07, | |
| "loss": 0.7453, | |
| "step": 724 | |
| }, | |
| { | |
| "epoch": 1.7099056603773586, | |
| "grad_norm": 0.7859405679363147, | |
| "learning_rate": 1.1680609797989038e-07, | |
| "loss": 0.7452, | |
| "step": 725 | |
| }, | |
| { | |
| "epoch": 1.7122641509433962, | |
| "grad_norm": 0.8028725796240722, | |
| "learning_rate": 1.1496654065926925e-07, | |
| "loss": 0.7173, | |
| "step": 726 | |
| }, | |
| { | |
| "epoch": 1.7146226415094339, | |
| "grad_norm": 0.8165982522602734, | |
| "learning_rate": 1.1314070014765642e-07, | |
| "loss": 0.7218, | |
| "step": 727 | |
| }, | |
| { | |
| "epoch": 1.7169811320754715, | |
| "grad_norm": 0.7860295163288017, | |
| "learning_rate": 1.1132860474308436e-07, | |
| "loss": 0.7614, | |
| "step": 728 | |
| }, | |
| { | |
| "epoch": 1.7193396226415094, | |
| "grad_norm": 0.7889724037710595, | |
| "learning_rate": 1.0953028253055541e-07, | |
| "loss": 0.7195, | |
| "step": 729 | |
| }, | |
| { | |
| "epoch": 1.7216981132075473, | |
| "grad_norm": 0.8067588097960763, | |
| "learning_rate": 1.0774576138160596e-07, | |
| "loss": 0.7482, | |
| "step": 730 | |
| }, | |
| { | |
| "epoch": 1.724056603773585, | |
| "grad_norm": 0.786061208841395, | |
| "learning_rate": 1.0597506895387499e-07, | |
| "loss": 0.7472, | |
| "step": 731 | |
| }, | |
| { | |
| "epoch": 1.7264150943396226, | |
| "grad_norm": 0.7917438727246212, | |
| "learning_rate": 1.0421823269067442e-07, | |
| "loss": 0.7751, | |
| "step": 732 | |
| }, | |
| { | |
| "epoch": 1.7287735849056602, | |
| "grad_norm": 0.7907162871021037, | |
| "learning_rate": 1.024752798205658e-07, | |
| "loss": 0.7202, | |
| "step": 733 | |
| }, | |
| { | |
| "epoch": 1.7311320754716981, | |
| "grad_norm": 0.9995843557820588, | |
| "learning_rate": 1.0074623735693633e-07, | |
| "loss": 0.7471, | |
| "step": 734 | |
| }, | |
| { | |
| "epoch": 1.733490566037736, | |
| "grad_norm": 1.3558698569505305, | |
| "learning_rate": 9.903113209758096e-08, | |
| "loss": 0.7564, | |
| "step": 735 | |
| }, | |
| { | |
| "epoch": 1.7358490566037736, | |
| "grad_norm": 0.7870396429020904, | |
| "learning_rate": 9.732999062428704e-08, | |
| "loss": 0.7641, | |
| "step": 736 | |
| }, | |
| { | |
| "epoch": 1.7382075471698113, | |
| "grad_norm": 0.8525068172515468, | |
| "learning_rate": 9.564283930242257e-08, | |
| "loss": 0.7404, | |
| "step": 737 | |
| }, | |
| { | |
| "epoch": 1.740566037735849, | |
| "grad_norm": 0.8014286873652903, | |
| "learning_rate": 9.396970428052697e-08, | |
| "loss": 0.7194, | |
| "step": 738 | |
| }, | |
| { | |
| "epoch": 1.7429245283018868, | |
| "grad_norm": 0.7676759180901973, | |
| "learning_rate": 9.231061148990648e-08, | |
| "loss": 0.7221, | |
| "step": 739 | |
| }, | |
| { | |
| "epoch": 1.7452830188679245, | |
| "grad_norm": 0.9131948150344564, | |
| "learning_rate": 9.066558664423163e-08, | |
| "loss": 0.7572, | |
| "step": 740 | |
| }, | |
| { | |
| "epoch": 1.7476415094339623, | |
| "grad_norm": 0.7734044180311592, | |
| "learning_rate": 8.903465523913955e-08, | |
| "loss": 0.7757, | |
| "step": 741 | |
| }, | |
| { | |
| "epoch": 1.75, | |
| "grad_norm": 0.755029746442279, | |
| "learning_rate": 8.741784255183759e-08, | |
| "loss": 0.7411, | |
| "step": 742 | |
| }, | |
| { | |
| "epoch": 1.75, | |
| "eval_loss": 0.6796497702598572, | |
| "eval_runtime": 82.4746, | |
| "eval_samples_per_second": 8.548, | |
| "eval_steps_per_second": 0.546, | |
| "step": 742 | |
| }, | |
| { | |
| "epoch": 1.7523584905660377, | |
| "grad_norm": 0.8497097319379918, | |
| "learning_rate": 8.581517364071267e-08, | |
| "loss": 0.7203, | |
| "step": 743 | |
| }, | |
| { | |
| "epoch": 1.7547169811320755, | |
| "grad_norm": 0.8020779183255858, | |
| "learning_rate": 8.422667334494249e-08, | |
| "loss": 0.7558, | |
| "step": 744 | |
| }, | |
| { | |
| "epoch": 1.7570754716981132, | |
| "grad_norm": 0.7616565389684601, | |
| "learning_rate": 8.265236628411087e-08, | |
| "loss": 0.7422, | |
| "step": 745 | |
| }, | |
| { | |
| "epoch": 1.759433962264151, | |
| "grad_norm": 0.9323853208771872, | |
| "learning_rate": 8.109227685782538e-08, | |
| "loss": 0.7819, | |
| "step": 746 | |
| }, | |
| { | |
| "epoch": 1.7617924528301887, | |
| "grad_norm": 1.350644299549973, | |
| "learning_rate": 7.954642924533994e-08, | |
| "loss": 0.7492, | |
| "step": 747 | |
| }, | |
| { | |
| "epoch": 1.7641509433962264, | |
| "grad_norm": 0.8092647636710026, | |
| "learning_rate": 7.801484740517939e-08, | |
| "loss": 0.7638, | |
| "step": 748 | |
| }, | |
| { | |
| "epoch": 1.766509433962264, | |
| "grad_norm": 0.8308445659034376, | |
| "learning_rate": 7.649755507476952e-08, | |
| "loss": 0.7444, | |
| "step": 749 | |
| }, | |
| { | |
| "epoch": 1.7688679245283019, | |
| "grad_norm": 2.5649621576701587, | |
| "learning_rate": 7.499457577006751e-08, | |
| "loss": 0.7713, | |
| "step": 750 | |
| }, | |
| { | |
| "epoch": 1.7712264150943398, | |
| "grad_norm": 0.786323334872225, | |
| "learning_rate": 7.350593278519823e-08, | |
| "loss": 0.7426, | |
| "step": 751 | |
| }, | |
| { | |
| "epoch": 1.7735849056603774, | |
| "grad_norm": 0.7795649745069329, | |
| "learning_rate": 7.203164919209359e-08, | |
| "loss": 0.7674, | |
| "step": 752 | |
| }, | |
| { | |
| "epoch": 1.775943396226415, | |
| "grad_norm": 0.8003158325581134, | |
| "learning_rate": 7.057174784013431e-08, | |
| "loss": 0.7531, | |
| "step": 753 | |
| }, | |
| { | |
| "epoch": 1.7783018867924527, | |
| "grad_norm": 0.7834816769548146, | |
| "learning_rate": 6.912625135579586e-08, | |
| "loss": 0.7212, | |
| "step": 754 | |
| }, | |
| { | |
| "epoch": 1.7806603773584906, | |
| "grad_norm": 0.8289452420732738, | |
| "learning_rate": 6.76951821422982e-08, | |
| "loss": 0.7715, | |
| "step": 755 | |
| }, | |
| { | |
| "epoch": 1.7830188679245285, | |
| "grad_norm": 0.8410465172590653, | |
| "learning_rate": 6.627856237925811e-08, | |
| "loss": 0.7276, | |
| "step": 756 | |
| }, | |
| { | |
| "epoch": 1.7853773584905661, | |
| "grad_norm": 0.8394202851068477, | |
| "learning_rate": 6.487641402234612e-08, | |
| "loss": 0.744, | |
| "step": 757 | |
| }, | |
| { | |
| "epoch": 1.7877358490566038, | |
| "grad_norm": 0.8383760649776106, | |
| "learning_rate": 6.348875880294535e-08, | |
| "loss": 0.7384, | |
| "step": 758 | |
| }, | |
| { | |
| "epoch": 1.7900943396226414, | |
| "grad_norm": 1.453114388460075, | |
| "learning_rate": 6.211561822781474e-08, | |
| "loss": 0.7765, | |
| "step": 759 | |
| }, | |
| { | |
| "epoch": 1.7924528301886793, | |
| "grad_norm": 0.8499896987869552, | |
| "learning_rate": 6.075701357875662e-08, | |
| "loss": 0.7284, | |
| "step": 760 | |
| }, | |
| { | |
| "epoch": 1.794811320754717, | |
| "grad_norm": 0.8652146496280481, | |
| "learning_rate": 5.9412965912286396e-08, | |
| "loss": 0.7561, | |
| "step": 761 | |
| }, | |
| { | |
| "epoch": 1.7971698113207548, | |
| "grad_norm": 0.8299032649447099, | |
| "learning_rate": 5.808349605930585e-08, | |
| "loss": 0.7565, | |
| "step": 762 | |
| }, | |
| { | |
| "epoch": 1.7995283018867925, | |
| "grad_norm": 0.7812451643671983, | |
| "learning_rate": 5.6768624624780604e-08, | |
| "loss": 0.725, | |
| "step": 763 | |
| }, | |
| { | |
| "epoch": 1.8018867924528301, | |
| "grad_norm": 0.8477713992666978, | |
| "learning_rate": 5.5468371987420936e-08, | |
| "loss": 0.7466, | |
| "step": 764 | |
| }, | |
| { | |
| "epoch": 1.8042452830188678, | |
| "grad_norm": 0.8337239533995923, | |
| "learning_rate": 5.4182758299365364e-08, | |
| "loss": 0.735, | |
| "step": 765 | |
| }, | |
| { | |
| "epoch": 1.8066037735849056, | |
| "grad_norm": 0.7691176408664865, | |
| "learning_rate": 5.29118034858691e-08, | |
| "loss": 0.772, | |
| "step": 766 | |
| }, | |
| { | |
| "epoch": 1.8089622641509435, | |
| "grad_norm": 0.8049526588971233, | |
| "learning_rate": 5.165552724499478e-08, | |
| "loss": 0.7315, | |
| "step": 767 | |
| }, | |
| { | |
| "epoch": 1.8113207547169812, | |
| "grad_norm": 0.8319464046786564, | |
| "learning_rate": 5.0413949047306894e-08, | |
| "loss": 0.7726, | |
| "step": 768 | |
| }, | |
| { | |
| "epoch": 1.8136792452830188, | |
| "grad_norm": 0.8474092435937203, | |
| "learning_rate": 4.918708813557093e-08, | |
| "loss": 0.7487, | |
| "step": 769 | |
| }, | |
| { | |
| "epoch": 1.8160377358490565, | |
| "grad_norm": 0.7943675010786753, | |
| "learning_rate": 4.797496352445396e-08, | |
| "loss": 0.7691, | |
| "step": 770 | |
| }, | |
| { | |
| "epoch": 1.8183962264150944, | |
| "grad_norm": 0.8217628363291084, | |
| "learning_rate": 4.677759400023085e-08, | |
| "loss": 0.7437, | |
| "step": 771 | |
| }, | |
| { | |
| "epoch": 1.8207547169811322, | |
| "grad_norm": 0.832358842867652, | |
| "learning_rate": 4.55949981204925e-08, | |
| "loss": 0.7568, | |
| "step": 772 | |
| }, | |
| { | |
| "epoch": 1.8231132075471699, | |
| "grad_norm": 0.8613869764376921, | |
| "learning_rate": 4.442719421385921e-08, | |
| "loss": 0.7274, | |
| "step": 773 | |
| }, | |
| { | |
| "epoch": 1.8254716981132075, | |
| "grad_norm": 0.7931363978514411, | |
| "learning_rate": 4.3274200379695315e-08, | |
| "loss": 0.7209, | |
| "step": 774 | |
| }, | |
| { | |
| "epoch": 1.8278301886792452, | |
| "grad_norm": 0.7551383801460104, | |
| "learning_rate": 4.213603448782932e-08, | |
| "loss": 0.7533, | |
| "step": 775 | |
| }, | |
| { | |
| "epoch": 1.830188679245283, | |
| "grad_norm": 0.8476707134264969, | |
| "learning_rate": 4.101271417827668e-08, | |
| "loss": 0.7585, | |
| "step": 776 | |
| }, | |
| { | |
| "epoch": 1.8325471698113207, | |
| "grad_norm": 0.8877158557106878, | |
| "learning_rate": 3.9904256860967433e-08, | |
| "loss": 0.7222, | |
| "step": 777 | |
| }, | |
| { | |
| "epoch": 1.8349056603773586, | |
| "grad_norm": 0.8633047498966683, | |
| "learning_rate": 3.881067971547469e-08, | |
| "loss": 0.7502, | |
| "step": 778 | |
| }, | |
| { | |
| "epoch": 1.8372641509433962, | |
| "grad_norm": 1.0520376373477054, | |
| "learning_rate": 3.7731999690749585e-08, | |
| "loss": 0.7443, | |
| "step": 779 | |
| }, | |
| { | |
| "epoch": 1.8396226415094339, | |
| "grad_norm": 0.7492875546723151, | |
| "learning_rate": 3.666823350485848e-08, | |
| "loss": 0.7781, | |
| "step": 780 | |
| }, | |
| { | |
| "epoch": 1.8419811320754715, | |
| "grad_norm": 0.7847944050831147, | |
| "learning_rate": 3.561939764472299e-08, | |
| "loss": 0.737, | |
| "step": 781 | |
| }, | |
| { | |
| "epoch": 1.8443396226415094, | |
| "grad_norm": 1.368419762468144, | |
| "learning_rate": 3.458550836586582e-08, | |
| "loss": 0.7523, | |
| "step": 782 | |
| }, | |
| { | |
| "epoch": 1.8466981132075473, | |
| "grad_norm": 0.8704033001785062, | |
| "learning_rate": 3.356658169215743e-08, | |
| "loss": 0.742, | |
| "step": 783 | |
| }, | |
| { | |
| "epoch": 1.849056603773585, | |
| "grad_norm": 0.764412444543419, | |
| "learning_rate": 3.2562633415568754e-08, | |
| "loss": 0.7327, | |
| "step": 784 | |
| }, | |
| { | |
| "epoch": 1.8514150943396226, | |
| "grad_norm": 0.8695554372256408, | |
| "learning_rate": 3.157367909592601e-08, | |
| "loss": 0.7338, | |
| "step": 785 | |
| }, | |
| { | |
| "epoch": 1.8537735849056602, | |
| "grad_norm": 0.7794614378925147, | |
| "learning_rate": 3.0599734060669626e-08, | |
| "loss": 0.7446, | |
| "step": 786 | |
| }, | |
| { | |
| "epoch": 1.8561320754716981, | |
| "grad_norm": 0.7712181969029962, | |
| "learning_rate": 2.9640813404616327e-08, | |
| "loss": 0.7377, | |
| "step": 787 | |
| }, | |
| { | |
| "epoch": 1.858490566037736, | |
| "grad_norm": 0.8826462854163208, | |
| "learning_rate": 2.869693198972556e-08, | |
| "loss": 0.7555, | |
| "step": 788 | |
| }, | |
| { | |
| "epoch": 1.8608490566037736, | |
| "grad_norm": 1.029113085742105, | |
| "learning_rate": 2.7768104444869434e-08, | |
| "loss": 0.7795, | |
| "step": 789 | |
| }, | |
| { | |
| "epoch": 1.8632075471698113, | |
| "grad_norm": 0.8318363964351398, | |
| "learning_rate": 2.6854345165605474e-08, | |
| "loss": 0.7351, | |
| "step": 790 | |
| }, | |
| { | |
| "epoch": 1.865566037735849, | |
| "grad_norm": 0.7865924156566725, | |
| "learning_rate": 2.595566831395346e-08, | |
| "loss": 0.7222, | |
| "step": 791 | |
| }, | |
| { | |
| "epoch": 1.8679245283018868, | |
| "grad_norm": 0.7955098420784998, | |
| "learning_rate": 2.507208781817638e-08, | |
| "loss": 0.7515, | |
| "step": 792 | |
| }, | |
| { | |
| "epoch": 1.8702830188679245, | |
| "grad_norm": 0.9146875621357804, | |
| "learning_rate": 2.4203617372564378e-08, | |
| "loss": 0.7173, | |
| "step": 793 | |
| }, | |
| { | |
| "epoch": 1.8726415094339623, | |
| "grad_norm": 0.7544835268896075, | |
| "learning_rate": 2.3350270437222374e-08, | |
| "loss": 0.7307, | |
| "step": 794 | |
| }, | |
| { | |
| "epoch": 1.875, | |
| "grad_norm": 1.806767663849715, | |
| "learning_rate": 2.2512060237861452e-08, | |
| "loss": 0.7301, | |
| "step": 795 | |
| }, | |
| { | |
| "epoch": 1.8773584905660377, | |
| "grad_norm": 0.804925906379938, | |
| "learning_rate": 2.1688999765594018e-08, | |
| "loss": 0.7552, | |
| "step": 796 | |
| }, | |
| { | |
| "epoch": 1.8797169811320755, | |
| "grad_norm": 0.7773879448134918, | |
| "learning_rate": 2.0881101776732967e-08, | |
| "loss": 0.7619, | |
| "step": 797 | |
| }, | |
| { | |
| "epoch": 1.8820754716981132, | |
| "grad_norm": 0.871709115043609, | |
| "learning_rate": 2.0088378792592286e-08, | |
| "loss": 0.7686, | |
| "step": 798 | |
| }, | |
| { | |
| "epoch": 1.884433962264151, | |
| "grad_norm": 0.7446217889352331, | |
| "learning_rate": 1.9310843099295204e-08, | |
| "loss": 0.7128, | |
| "step": 799 | |
| }, | |
| { | |
| "epoch": 1.8867924528301887, | |
| "grad_norm": 0.7661065857274754, | |
| "learning_rate": 1.8548506747582128e-08, | |
| "loss": 0.7284, | |
| "step": 800 | |
| }, | |
| { | |
| "epoch": 1.8891509433962264, | |
| "grad_norm": 0.7916406522727306, | |
| "learning_rate": 1.780138155262456e-08, | |
| "loss": 0.7321, | |
| "step": 801 | |
| }, | |
| { | |
| "epoch": 1.891509433962264, | |
| "grad_norm": 0.9641121942586747, | |
| "learning_rate": 1.7069479093842042e-08, | |
| "loss": 0.7411, | |
| "step": 802 | |
| }, | |
| { | |
| "epoch": 1.8938679245283019, | |
| "grad_norm": 0.7930873801533905, | |
| "learning_rate": 1.6352810714722387e-08, | |
| "loss": 0.7345, | |
| "step": 803 | |
| }, | |
| { | |
| "epoch": 1.8962264150943398, | |
| "grad_norm": 0.9298677041430008, | |
| "learning_rate": 1.565138752264572e-08, | |
| "loss": 0.7616, | |
| "step": 804 | |
| }, | |
| { | |
| "epoch": 1.8985849056603774, | |
| "grad_norm": 0.885574237101463, | |
| "learning_rate": 1.496522038871295e-08, | |
| "loss": 0.7572, | |
| "step": 805 | |
| }, | |
| { | |
| "epoch": 1.900943396226415, | |
| "grad_norm": 0.8153461825360933, | |
| "learning_rate": 1.4294319947577017e-08, | |
| "loss": 0.75, | |
| "step": 806 | |
| }, | |
| { | |
| "epoch": 1.9033018867924527, | |
| "grad_norm": 0.7660700579693493, | |
| "learning_rate": 1.3638696597277677e-08, | |
| "loss": 0.7421, | |
| "step": 807 | |
| }, | |
| { | |
| "epoch": 1.9056603773584906, | |
| "grad_norm": 0.805964323807633, | |
| "learning_rate": 1.2998360499080763e-08, | |
| "loss": 0.7958, | |
| "step": 808 | |
| }, | |
| { | |
| "epoch": 1.9080188679245285, | |
| "grad_norm": 0.9429626916004815, | |
| "learning_rate": 1.2373321577320628e-08, | |
| "loss": 0.734, | |
| "step": 809 | |
| }, | |
| { | |
| "epoch": 1.9103773584905661, | |
| "grad_norm": 0.7904102497155711, | |
| "learning_rate": 1.1763589519246387e-08, | |
| "loss": 0.7478, | |
| "step": 810 | |
| }, | |
| { | |
| "epoch": 1.9127358490566038, | |
| "grad_norm": 0.7928898152839706, | |
| "learning_rate": 1.1169173774871477e-08, | |
| "loss": 0.7373, | |
| "step": 811 | |
| }, | |
| { | |
| "epoch": 1.9150943396226414, | |
| "grad_norm": 0.7814723884700895, | |
| "learning_rate": 1.0590083556827556e-08, | |
| "loss": 0.7615, | |
| "step": 812 | |
| }, | |
| { | |
| "epoch": 1.9174528301886793, | |
| "grad_norm": 0.7802344182322299, | |
| "learning_rate": 1.0026327840221727e-08, | |
| "loss": 0.7503, | |
| "step": 813 | |
| }, | |
| { | |
| "epoch": 1.919811320754717, | |
| "grad_norm": 1.0812085791485797, | |
| "learning_rate": 9.477915362496758e-09, | |
| "loss": 0.7614, | |
| "step": 814 | |
| }, | |
| { | |
| "epoch": 1.9221698113207548, | |
| "grad_norm": 0.820203697559199, | |
| "learning_rate": 8.94485462329675e-09, | |
| "loss": 0.7374, | |
| "step": 815 | |
| }, | |
| { | |
| "epoch": 1.9245283018867925, | |
| "grad_norm": 0.8722587060636673, | |
| "learning_rate": 8.42715388433446e-09, | |
| "loss": 0.7328, | |
| "step": 816 | |
| }, | |
| { | |
| "epoch": 1.9268867924528301, | |
| "grad_norm": 0.7840984315269908, | |
| "learning_rate": 7.924821169263963e-09, | |
| "loss": 0.7542, | |
| "step": 817 | |
| }, | |
| { | |
| "epoch": 1.9292452830188678, | |
| "grad_norm": 0.8440330231818731, | |
| "learning_rate": 7.437864263555638e-09, | |
| "loss": 0.7317, | |
| "step": 818 | |
| }, | |
| { | |
| "epoch": 1.9316037735849056, | |
| "grad_norm": 0.895812932209063, | |
| "learning_rate": 6.966290714375933e-09, | |
| "loss": 0.7409, | |
| "step": 819 | |
| }, | |
| { | |
| "epoch": 1.9339622641509435, | |
| "grad_norm": 0.899638041623908, | |
| "learning_rate": 6.510107830470568e-09, | |
| "loss": 0.7446, | |
| "step": 820 | |
| }, | |
| { | |
| "epoch": 1.9363207547169812, | |
| "grad_norm": 0.8152793437016046, | |
| "learning_rate": 6.069322682050515e-09, | |
| "loss": 0.7634, | |
| "step": 821 | |
| }, | |
| { | |
| "epoch": 1.9386792452830188, | |
| "grad_norm": 0.7735814965641481, | |
| "learning_rate": 5.643942100683308e-09, | |
| "loss": 0.7809, | |
| "step": 822 | |
| }, | |
| { | |
| "epoch": 1.9410377358490565, | |
| "grad_norm": 0.8351447564551893, | |
| "learning_rate": 5.23397267918646e-09, | |
| "loss": 0.7588, | |
| "step": 823 | |
| }, | |
| { | |
| "epoch": 1.9433962264150944, | |
| "grad_norm": 0.7614597204440534, | |
| "learning_rate": 4.83942077152577e-09, | |
| "loss": 0.7231, | |
| "step": 824 | |
| }, | |
| { | |
| "epoch": 1.9457547169811322, | |
| "grad_norm": 0.8548976791386363, | |
| "learning_rate": 4.460292492716511e-09, | |
| "loss": 0.754, | |
| "step": 825 | |
| }, | |
| { | |
| "epoch": 1.9481132075471699, | |
| "grad_norm": 0.8381107415157564, | |
| "learning_rate": 4.0965937187287246e-09, | |
| "loss": 0.7351, | |
| "step": 826 | |
| }, | |
| { | |
| "epoch": 1.9504716981132075, | |
| "grad_norm": 5.772455437843555, | |
| "learning_rate": 3.748330086396523e-09, | |
| "loss": 0.7657, | |
| "step": 827 | |
| }, | |
| { | |
| "epoch": 1.9528301886792452, | |
| "grad_norm": 0.797010233582818, | |
| "learning_rate": 3.415506993330153e-09, | |
| "loss": 0.7248, | |
| "step": 828 | |
| }, | |
| { | |
| "epoch": 1.955188679245283, | |
| "grad_norm": 0.777781094479457, | |
| "learning_rate": 3.0981295978326216e-09, | |
| "loss": 0.7604, | |
| "step": 829 | |
| }, | |
| { | |
| "epoch": 1.9575471698113207, | |
| "grad_norm": 0.8449725908943951, | |
| "learning_rate": 2.7962028188198706e-09, | |
| "loss": 0.7736, | |
| "step": 830 | |
| }, | |
| { | |
| "epoch": 1.9599056603773586, | |
| "grad_norm": 0.7801120467589358, | |
| "learning_rate": 2.5097313357442806e-09, | |
| "loss": 0.7178, | |
| "step": 831 | |
| }, | |
| { | |
| "epoch": 1.9622641509433962, | |
| "grad_norm": 0.7762312098767981, | |
| "learning_rate": 2.2387195885221756e-09, | |
| "loss": 0.7575, | |
| "step": 832 | |
| }, | |
| { | |
| "epoch": 1.9646226415094339, | |
| "grad_norm": 0.824362916729911, | |
| "learning_rate": 1.983171777465431e-09, | |
| "loss": 0.76, | |
| "step": 833 | |
| }, | |
| { | |
| "epoch": 1.9669811320754715, | |
| "grad_norm": 0.9204175438402101, | |
| "learning_rate": 1.743091863215751e-09, | |
| "loss": 0.7312, | |
| "step": 834 | |
| }, | |
| { | |
| "epoch": 1.9693396226415094, | |
| "grad_norm": 0.8426151332993567, | |
| "learning_rate": 1.5184835666838258e-09, | |
| "loss": 0.7489, | |
| "step": 835 | |
| }, | |
| { | |
| "epoch": 1.9716981132075473, | |
| "grad_norm": 0.7997774511020088, | |
| "learning_rate": 1.3093503689910467e-09, | |
| "loss": 0.7243, | |
| "step": 836 | |
| }, | |
| { | |
| "epoch": 1.974056603773585, | |
| "grad_norm": 1.0915014977180026, | |
| "learning_rate": 1.1156955114162147e-09, | |
| "loss": 0.7411, | |
| "step": 837 | |
| }, | |
| { | |
| "epoch": 1.9764150943396226, | |
| "grad_norm": 0.7394147838620203, | |
| "learning_rate": 9.375219953450253e-10, | |
| "loss": 0.746, | |
| "step": 838 | |
| }, | |
| { | |
| "epoch": 1.9787735849056602, | |
| "grad_norm": 0.7690476142622722, | |
| "learning_rate": 7.748325822234392e-10, | |
| "loss": 0.729, | |
| "step": 839 | |
| }, | |
| { | |
| "epoch": 1.9811320754716981, | |
| "grad_norm": 0.9039884352435014, | |
| "learning_rate": 6.276297935149388e-10, | |
| "loss": 0.7569, | |
| "step": 840 | |
| }, | |
| { | |
| "epoch": 1.983490566037736, | |
| "grad_norm": 1.2814279255523484, | |
| "learning_rate": 4.959159106615596e-10, | |
| "loss": 0.7351, | |
| "step": 841 | |
| }, | |
| { | |
| "epoch": 1.9858490566037736, | |
| "grad_norm": 0.8004748840113347, | |
| "learning_rate": 3.7969297504858443e-10, | |
| "loss": 0.724, | |
| "step": 842 | |
| }, | |
| { | |
| "epoch": 1.9882075471698113, | |
| "grad_norm": 0.9234344448728469, | |
| "learning_rate": 2.789627879725698e-10, | |
| "loss": 0.749, | |
| "step": 843 | |
| }, | |
| { | |
| "epoch": 1.990566037735849, | |
| "grad_norm": 1.0793509856974828, | |
| "learning_rate": 1.9372691061381175e-10, | |
| "loss": 0.7296, | |
| "step": 844 | |
| }, | |
| { | |
| "epoch": 1.9929245283018868, | |
| "grad_norm": 0.8234018950018985, | |
| "learning_rate": 1.2398666401181035e-10, | |
| "loss": 0.738, | |
| "step": 845 | |
| }, | |
| { | |
| "epoch": 1.9952830188679245, | |
| "grad_norm": 0.7591071692242605, | |
| "learning_rate": 6.974312904517443e-11, | |
| "loss": 0.7713, | |
| "step": 846 | |
| }, | |
| { | |
| "epoch": 1.9976415094339623, | |
| "grad_norm": 0.8367565544655438, | |
| "learning_rate": 3.099714641452422e-11, | |
| "loss": 0.7812, | |
| "step": 847 | |
| }, | |
| { | |
| "epoch": 2.0, | |
| "grad_norm": 0.8006380089754, | |
| "learning_rate": 7.749316629612756e-12, | |
| "loss": 0.769, | |
| "step": 848 | |
| }, | |
| { | |
| "epoch": 2.0, | |
| "eval_loss": 0.6794618964195251, | |
| "eval_runtime": 82.3418, | |
| "eval_samples_per_second": 8.562, | |
| "eval_steps_per_second": 0.547, | |
| "step": 848 | |
| } | |
| ], | |
| "logging_steps": 1, | |
| "max_steps": 848, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 2, | |
| "save_steps": 212, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 8.964690701765837e+17, | |
| "train_batch_size": 2, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |