{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 10.03125,
  "global_step": 1600,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.02,
      "learning_rate": 9.5e-06,
      "loss": 372.7093,
      "step": 25
    },
    {
      "epoch": 0.03,
      "learning_rate": 2.2000000000000003e-05,
      "loss": 151.4812,
      "step": 50
    },
    {
      "epoch": 0.05,
      "learning_rate": 3.45e-05,
      "loss": 41.7603,
      "step": 75
    },
    {
      "epoch": 0.06,
      "learning_rate": 4.7e-05,
      "loss": 31.3741,
      "step": 100
    },
    {
      "epoch": 0.06,
      "eval_loss": 25.858156204223633,
      "eval_runtime": 606.3297,
      "eval_samples_per_second": 2.891,
      "eval_steps_per_second": 0.724,
      "eval_wer": 10.482824557809192,
      "step": 100
    },
    {
      "epoch": 0.08,
      "learning_rate": 5.95e-05,
      "loss": 23.4941,
      "step": 125
    },
    {
      "epoch": 0.09,
      "learning_rate": 7.2e-05,
      "loss": 18.4366,
      "step": 150
    },
    {
      "epoch": 1.01,
      "learning_rate": 8.450000000000001e-05,
      "loss": 18.0765,
      "step": 175
    },
    {
      "epoch": 1.03,
      "learning_rate": 9.7e-05,
      "loss": 13.0078,
      "step": 200
    },
    {
      "epoch": 1.03,
      "eval_loss": 13.417298316955566,
      "eval_runtime": 603.1513,
      "eval_samples_per_second": 2.906,
      "eval_steps_per_second": 0.728,
      "eval_wer": 10.482824557809192,
      "step": 200
    },
    {
      "epoch": 1.04,
      "learning_rate": 9.864285714285715e-05,
      "loss": 14.841,
      "step": 225
    },
    {
      "epoch": 1.06,
      "learning_rate": 9.685714285714286e-05,
      "loss": 11.6129,
      "step": 250
    },
    {
      "epoch": 1.07,
      "learning_rate": 9.507142857142857e-05,
      "loss": 10.2453,
      "step": 275
    },
    {
      "epoch": 1.09,
      "learning_rate": 9.328571428571429e-05,
      "loss": 10.3619,
      "step": 300
    },
    {
      "epoch": 1.09,
      "eval_loss": 10.854023933410645,
      "eval_runtime": 606.96,
      "eval_samples_per_second": 2.888,
      "eval_steps_per_second": 0.723,
      "eval_wer": 10.482824557809192,
      "step": 300
    },
    {
      "epoch": 2.01,
      "learning_rate": 9.15e-05,
      "loss": 10.9065,
      "step": 325
    },
    {
      "epoch": 2.02,
      "learning_rate": 8.971428571428571e-05,
      "loss": 9.402,
      "step": 350
    },
    {
      "epoch": 2.04,
      "learning_rate": 8.792857142857144e-05,
      "loss": 9.4776,
      "step": 375
    },
    {
      "epoch": 2.06,
      "learning_rate": 8.614285714285714e-05,
      "loss": 8.7869,
      "step": 400
    },
    {
      "epoch": 2.06,
      "eval_loss": 9.624931335449219,
      "eval_runtime": 698.0201,
      "eval_samples_per_second": 2.511,
      "eval_steps_per_second": 0.629,
      "eval_wer": 10.482824557809192,
      "step": 400
    },
    {
      "epoch": 2.07,
      "learning_rate": 8.435714285714286e-05,
      "loss": 7.8405,
      "step": 425
    },
    {
      "epoch": 2.09,
      "learning_rate": 8.257142857142858e-05,
      "loss": 8.2306,
      "step": 450
    },
    {
      "epoch": 3.01,
      "learning_rate": 8.078571428571428e-05,
      "loss": 7.319,
      "step": 475
    },
    {
      "epoch": 3.02,
      "learning_rate": 7.900000000000001e-05,
      "loss": 7.3964,
      "step": 500
    },
    {
      "epoch": 3.02,
      "eval_loss": 9.181172370910645,
      "eval_runtime": 668.1581,
      "eval_samples_per_second": 2.624,
      "eval_steps_per_second": 0.657,
      "eval_wer": 10.482824557809192,
      "step": 500
    },
    {
      "epoch": 3.04,
      "learning_rate": 7.721428571428572e-05,
      "loss": 7.9307,
      "step": 525
    },
    {
      "epoch": 3.05,
      "learning_rate": 7.542857142857144e-05,
      "loss": 7.1104,
      "step": 550
    },
    {
      "epoch": 3.07,
      "learning_rate": 7.364285714285715e-05,
      "loss": 5.5863,
      "step": 575
    },
    {
      "epoch": 3.08,
      "learning_rate": 7.185714285714285e-05,
      "loss": 6.6321,
      "step": 600
    },
    {
      "epoch": 3.08,
      "eval_loss": 8.653571128845215,
      "eval_runtime": 607.9213,
      "eval_samples_per_second": 2.884,
      "eval_steps_per_second": 0.722,
      "eval_wer": 10.482824557809192,
      "step": 600
    },
    {
      "epoch": 4.0,
      "learning_rate": 7.007142857142858e-05,
      "loss": 6.9731,
      "step": 625
    },
    {
      "epoch": 4.02,
      "learning_rate": 6.828571428571429e-05,
      "loss": 5.9349,
      "step": 650
    },
    {
      "epoch": 4.03,
      "learning_rate": 6.65e-05,
      "loss": 5.804,
      "step": 675
    },
    {
      "epoch": 4.05,
      "learning_rate": 6.471428571428572e-05,
      "loss": 6.4612,
      "step": 700
    },
    {
      "epoch": 4.05,
      "eval_loss": 8.604568481445312,
      "eval_runtime": 678.2991,
      "eval_samples_per_second": 2.584,
      "eval_steps_per_second": 0.647,
      "eval_wer": 10.482824557809192,
      "step": 700
    },
    {
      "epoch": 4.07,
      "learning_rate": 6.292857142857143e-05,
      "loss": 5.0099,
      "step": 725
    },
    {
      "epoch": 4.08,
      "learning_rate": 6.114285714285714e-05,
      "loss": 5.1272,
      "step": 750
    },
    {
      "epoch": 4.1,
      "learning_rate": 5.935714285714286e-05,
      "loss": 5.9429,
      "step": 775
    },
    {
      "epoch": 5.02,
      "learning_rate": 5.757142857142858e-05,
      "loss": 4.8358,
      "step": 800
    },
    {
      "epoch": 5.02,
      "eval_loss": 8.08900260925293,
      "eval_runtime": 611.9198,
      "eval_samples_per_second": 2.865,
      "eval_steps_per_second": 0.717,
      "eval_wer": 10.482824557809192,
      "step": 800
    },
    {
      "epoch": 5.03,
      "learning_rate": 5.5785714285714285e-05,
      "loss": 5.3602,
      "step": 825
    },
    {
      "epoch": 5.05,
      "learning_rate": 5.4000000000000005e-05,
      "loss": 5.2596,
      "step": 850
    },
    {
      "epoch": 5.06,
      "learning_rate": 5.221428571428572e-05,
      "loss": 4.3448,
      "step": 875
    },
    {
      "epoch": 5.08,
      "learning_rate": 5.042857142857144e-05,
      "loss": 4.4918,
      "step": 900
    },
    {
      "epoch": 5.08,
      "eval_loss": 8.314087867736816,
      "eval_runtime": 625.3932,
      "eval_samples_per_second": 2.803,
      "eval_steps_per_second": 0.702,
      "eval_wer": 10.482824557809192,
      "step": 900
    },
    {
      "epoch": 5.09,
      "learning_rate": 4.8642857142857145e-05,
      "loss": 4.9251,
      "step": 925
    },
    {
      "epoch": 6.01,
      "learning_rate": 4.685714285714286e-05,
      "loss": 4.4105,
      "step": 950
    },
    {
      "epoch": 6.03,
      "learning_rate": 4.507142857142858e-05,
      "loss": 4.1762,
      "step": 975
    },
    {
      "epoch": 6.04,
      "learning_rate": 4.328571428571429e-05,
      "loss": 4.7548,
      "step": 1000
    },
    {
      "epoch": 6.04,
      "eval_loss": 8.166025161743164,
      "eval_runtime": 603.2099,
      "eval_samples_per_second": 2.906,
      "eval_steps_per_second": 0.728,
      "eval_wer": 10.482824557809192,
      "step": 1000
    },
    {
      "epoch": 6.06,
      "learning_rate": 4.15e-05,
      "loss": 3.9682,
      "step": 1025
    },
    {
      "epoch": 6.08,
      "learning_rate": 3.971428571428571e-05,
      "loss": 3.5912,
      "step": 1050
    },
    {
      "epoch": 6.09,
      "learning_rate": 3.792857142857143e-05,
      "loss": 4.0124,
      "step": 1075
    },
    {
      "epoch": 7.01,
      "learning_rate": 3.6142857142857146e-05,
      "loss": 3.7881,
      "step": 1100
    },
    {
      "epoch": 7.01,
      "eval_loss": 8.247111320495605,
      "eval_runtime": 593.8424,
      "eval_samples_per_second": 2.952,
      "eval_steps_per_second": 0.739,
      "eval_wer": 10.482824557809192,
      "step": 1100
    },
    {
      "epoch": 7.03,
      "learning_rate": 3.435714285714286e-05,
      "loss": 3.5058,
      "step": 1125
    },
    {
      "epoch": 7.04,
      "learning_rate": 3.257142857142857e-05,
      "loss": 4.0264,
      "step": 1150
    },
    {
      "epoch": 7.06,
      "learning_rate": 3.078571428571429e-05,
      "loss": 3.1674,
      "step": 1175
    },
    {
      "epoch": 7.07,
      "learning_rate": 2.9e-05,
      "loss": 3.1916,
      "step": 1200
    },
    {
      "epoch": 7.07,
      "eval_loss": 8.077924728393555,
      "eval_runtime": 592.5004,
      "eval_samples_per_second": 2.959,
      "eval_steps_per_second": 0.741,
      "eval_wer": 10.482824557809192,
      "step": 1200
    },
    {
      "epoch": 7.09,
      "learning_rate": 2.7214285714285714e-05,
      "loss": 3.6668,
      "step": 1225
    },
    {
      "epoch": 8.01,
      "learning_rate": 2.542857142857143e-05,
      "loss": 2.8314,
      "step": 1250
    },
    {
      "epoch": 8.02,
      "learning_rate": 2.3642857142857144e-05,
      "loss": 3.271,
      "step": 1275
    },
    {
      "epoch": 8.04,
      "learning_rate": 2.185714285714286e-05,
      "loss": 3.2039,
      "step": 1300
    },
    {
      "epoch": 8.04,
      "eval_loss": 8.110590934753418,
      "eval_runtime": 591.963,
      "eval_samples_per_second": 2.961,
      "eval_steps_per_second": 0.742,
      "eval_wer": 10.482824557809192,
      "step": 1300
    },
    {
      "epoch": 8.05,
      "learning_rate": 2.007142857142857e-05,
      "loss": 3.3088,
      "step": 1325
    },
    {
      "epoch": 8.07,
      "learning_rate": 1.8285714285714288e-05,
      "loss": 2.5956,
      "step": 1350
    },
    {
      "epoch": 8.08,
      "learning_rate": 1.65e-05,
      "loss": 2.7281,
      "step": 1375
    },
    {
      "epoch": 9.0,
      "learning_rate": 1.4714285714285713e-05,
      "loss": 3.038,
      "step": 1400
    },
    {
      "epoch": 9.0,
      "eval_loss": 8.087455749511719,
      "eval_runtime": 586.1394,
      "eval_samples_per_second": 2.991,
      "eval_steps_per_second": 0.749,
      "eval_wer": 10.482824557809192,
      "step": 1400
    },
    {
      "epoch": 9.02,
      "learning_rate": 1.2928571428571428e-05,
      "loss": 2.4357,
      "step": 1425
    },
    {
      "epoch": 9.03,
      "learning_rate": 1.1142857142857143e-05,
      "loss": 3.0457,
      "step": 1450
    },
    {
      "epoch": 9.05,
      "learning_rate": 9.357142857142857e-06,
      "loss": 2.8716,
      "step": 1475
    },
    {
      "epoch": 9.07,
      "learning_rate": 7.571428571428572e-06,
      "loss": 2.3249,
      "step": 1500
    },
    {
      "epoch": 9.07,
      "eval_loss": 8.102546691894531,
      "eval_runtime": 588.9514,
      "eval_samples_per_second": 2.976,
      "eval_steps_per_second": 0.745,
      "eval_wer": 10.482824557809192,
      "step": 1500
    },
    {
      "epoch": 9.08,
      "learning_rate": 5.785714285714286e-06,
      "loss": 2.4739,
      "step": 1525
    },
    {
      "epoch": 9.1,
      "learning_rate": 4.000000000000001e-06,
      "loss": 2.6765,
      "step": 1550
    },
    {
      "epoch": 10.02,
      "learning_rate": 2.214285714285714e-06,
      "loss": 2.2102,
      "step": 1575
    },
    {
      "epoch": 10.03,
      "learning_rate": 4.285714285714286e-07,
      "loss": 2.6124,
      "step": 1600
    },
    {
      "epoch": 10.03,
      "eval_loss": 8.151402473449707,
      "eval_runtime": 587.8674,
      "eval_samples_per_second": 2.982,
      "eval_steps_per_second": 0.747,
      "eval_wer": 10.482824557809192,
      "step": 1600
    },
    {
      "epoch": 10.03,
      "step": 1600,
      "total_flos": 1.4987327135313598e+19,
      "train_loss": 15.193398921489715,
      "train_runtime": 17685.7852,
      "train_samples_per_second": 2.895,
      "train_steps_per_second": 0.09
    }
  ],
  "max_steps": 1600,
  "num_train_epochs": 9223372036854775807,
  "total_flos": 1.4987327135313598e+19,
  "trial_name": null,
  "trial_params": null
}