{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.0,
  "global_step": 22170,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.07,
      "learning_rate": 4.8872350022553004e-05,
      "loss": 1.7015,
      "step": 500
    },
    {
      "epoch": 0.14,
      "learning_rate": 4.7744700045106e-05,
      "loss": 1.5752,
      "step": 1000
    },
    {
      "epoch": 0.2,
      "learning_rate": 4.6617050067659e-05,
      "loss": 1.5199,
      "step": 1500
    },
    {
      "epoch": 0.27,
      "learning_rate": 4.5489400090211996e-05,
      "loss": 1.4862,
      "step": 2000
    },
    {
      "epoch": 0.34,
      "learning_rate": 4.4361750112765e-05,
      "loss": 1.4695,
      "step": 2500
    },
    {
      "epoch": 0.41,
      "learning_rate": 4.3234100135318e-05,
      "loss": 1.44,
      "step": 3000
    },
    {
      "epoch": 0.47,
      "learning_rate": 4.2106450157870994e-05,
      "loss": 1.4353,
      "step": 3500
    },
    {
      "epoch": 0.54,
      "learning_rate": 4.0978800180423996e-05,
      "loss": 1.4165,
      "step": 4000
    },
    {
      "epoch": 0.61,
      "learning_rate": 3.9851150202977e-05,
      "loss": 1.4017,
      "step": 4500
    },
    {
      "epoch": 0.68,
      "learning_rate": 3.872350022553e-05,
      "loss": 1.3854,
      "step": 5000
    },
    {
      "epoch": 0.74,
      "learning_rate": 3.7595850248083e-05,
      "loss": 1.3809,
      "step": 5500
    },
    {
      "epoch": 0.81,
      "learning_rate": 3.6468200270635996e-05,
      "loss": 1.3731,
      "step": 6000
    },
    {
      "epoch": 0.88,
      "learning_rate": 3.5340550293189e-05,
      "loss": 1.3641,
      "step": 6500
    },
    {
      "epoch": 0.95,
      "learning_rate": 3.4212900315742e-05,
      "loss": 1.3546,
      "step": 7000
    },
    {
      "epoch": 1.01,
      "learning_rate": 3.3085250338294994e-05,
      "loss": 1.3382,
      "step": 7500
    },
    {
      "epoch": 1.08,
      "learning_rate": 3.1957600360847996e-05,
      "loss": 1.3303,
      "step": 8000
    },
    {
      "epoch": 1.15,
      "learning_rate": 3.0829950383401e-05,
      "loss": 1.3253,
      "step": 8500
    },
    {
      "epoch": 1.22,
      "learning_rate": 2.9702300405953993e-05,
      "loss": 1.3242,
      "step": 9000
    },
    {
      "epoch": 1.29,
      "learning_rate": 2.8574650428506994e-05,
      "loss": 1.307,
      "step": 9500
    },
    {
      "epoch": 1.35,
      "learning_rate": 2.744700045105999e-05,
      "loss": 1.3029,
      "step": 10000
    },
    {
      "epoch": 1.42,
      "learning_rate": 2.631935047361299e-05,
      "loss": 1.3085,
      "step": 10500
    },
    {
      "epoch": 1.49,
      "learning_rate": 2.5191700496165993e-05,
      "loss": 1.2841,
      "step": 11000
    },
    {
      "epoch": 1.56,
      "learning_rate": 2.406405051871899e-05,
      "loss": 1.2921,
      "step": 11500
    },
    {
      "epoch": 1.62,
      "learning_rate": 2.293640054127199e-05,
      "loss": 1.2855,
      "step": 12000
    },
    {
      "epoch": 1.69,
      "learning_rate": 2.1808750563824988e-05,
      "loss": 1.2759,
      "step": 12500
    },
    {
      "epoch": 1.76,
      "learning_rate": 2.068110058637799e-05,
      "loss": 1.26,
      "step": 13000
    },
    {
      "epoch": 1.83,
      "learning_rate": 1.955345060893099e-05,
      "loss": 1.2742,
      "step": 13500
    },
    {
      "epoch": 1.89,
      "learning_rate": 1.842580063148399e-05,
      "loss": 1.2761,
      "step": 14000
    },
    {
      "epoch": 1.96,
      "learning_rate": 1.7298150654036988e-05,
      "loss": 1.2549,
      "step": 14500
    },
    {
      "epoch": 2.03,
      "learning_rate": 1.6170500676589986e-05,
      "loss": 1.2552,
      "step": 15000
    },
    {
      "epoch": 2.1,
      "learning_rate": 1.5042850699142988e-05,
      "loss": 1.2356,
      "step": 15500
    },
    {
      "epoch": 2.17,
      "learning_rate": 1.3915200721695986e-05,
      "loss": 1.239,
      "step": 16000
    },
    {
      "epoch": 2.23,
      "learning_rate": 1.2787550744248986e-05,
      "loss": 1.2378,
      "step": 16500
    },
    {
      "epoch": 2.3,
      "learning_rate": 1.1659900766801986e-05,
      "loss": 1.2431,
      "step": 17000
    },
    {
      "epoch": 2.37,
      "learning_rate": 1.0532250789354985e-05,
      "loss": 1.2202,
      "step": 17500
    },
    {
      "epoch": 2.44,
      "learning_rate": 9.404600811907985e-06,
      "loss": 1.232,
      "step": 18000
    },
    {
      "epoch": 2.5,
      "learning_rate": 8.276950834460983e-06,
      "loss": 1.2274,
      "step": 18500
    },
    {
      "epoch": 2.57,
      "learning_rate": 7.149300857013984e-06,
      "loss": 1.2233,
      "step": 19000
    },
    {
      "epoch": 2.64,
      "learning_rate": 6.021650879566982e-06,
      "loss": 1.2234,
      "step": 19500
    },
    {
      "epoch": 2.71,
      "learning_rate": 4.894000902119982e-06,
      "loss": 1.2216,
      "step": 20000
    },
    {
      "epoch": 2.77,
      "learning_rate": 3.7663509246729816e-06,
      "loss": 1.2164,
      "step": 20500
    },
    {
      "epoch": 2.84,
      "learning_rate": 2.638700947225981e-06,
      "loss": 1.2203,
      "step": 21000
    },
    {
      "epoch": 2.91,
      "learning_rate": 1.5110509697789808e-06,
      "loss": 1.2105,
      "step": 21500
    },
    {
      "epoch": 2.98,
      "learning_rate": 3.8340099233198017e-07,
      "loss": 1.2186,
      "step": 22000
    },
    {
      "epoch": 3.0,
      "step": 22170,
      "train_runtime": 19825.6129,
      "train_samples_per_second": 1.118
    }
  ],
  "max_steps": 22170,
  "num_train_epochs": 3,
  "total_flos": 59663810085304320,
  "trial_name": null,
  "trial_params": null
}