{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.9981785063752276,
  "eval_steps": 500,
  "global_step": 274,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0,
      "learning_rate": 7.142857142857143e-06,
      "loss": 1.7833,
      "step": 1
    },
    {
      "epoch": 0.02,
      "learning_rate": 3.571428571428572e-05,
      "loss": 1.7803,
      "step": 5
    },
    {
      "epoch": 0.04,
      "learning_rate": 7.142857142857143e-05,
      "loss": 1.5541,
      "step": 10
    },
    {
      "epoch": 0.05,
      "learning_rate": 0.00010714285714285715,
      "loss": 1.3651,
      "step": 15
    },
    {
      "epoch": 0.07,
      "learning_rate": 0.00014285714285714287,
      "loss": 1.2684,
      "step": 20
    },
    {
      "epoch": 0.09,
      "learning_rate": 0.0001785714285714286,
      "loss": 1.2601,
      "step": 25
    },
    {
      "epoch": 0.11,
      "learning_rate": 0.00019996738360808565,
      "loss": 1.2498,
      "step": 30
    },
    {
      "epoch": 0.13,
      "learning_rate": 0.00019960069350344548,
      "loss": 1.2452,
      "step": 35
    },
    {
      "epoch": 0.15,
      "learning_rate": 0.00019882804237803488,
      "loss": 1.2473,
      "step": 40
    },
    {
      "epoch": 0.16,
      "learning_rate": 0.00019765257946935944,
      "loss": 1.1842,
      "step": 45
    },
    {
      "epoch": 0.18,
      "learning_rate": 0.00019607909582962477,
      "loss": 1.2224,
      "step": 50
    },
    {
      "epoch": 0.2,
      "learning_rate": 0.00019411400479795617,
      "loss": 1.2516,
      "step": 55
    },
    {
      "epoch": 0.22,
      "learning_rate": 0.0001917653158603628,
      "loss": 1.2017,
      "step": 60
    },
    {
      "epoch": 0.24,
      "learning_rate": 0.00018904260200399006,
      "loss": 1.1943,
      "step": 65
    },
    {
      "epoch": 0.26,
      "learning_rate": 0.00018595696069872013,
      "loss": 1.213,
      "step": 70
    },
    {
      "epoch": 0.27,
      "learning_rate": 0.00018252096866515558,
      "loss": 1.1741,
      "step": 75
    },
    {
      "epoch": 0.29,
      "learning_rate": 0.00017874863061334657,
      "loss": 1.1809,
      "step": 80
    },
    {
      "epoch": 0.31,
      "learning_rate": 0.00017465532216119625,
      "loss": 1.1773,
      "step": 85
    },
    {
      "epoch": 0.33,
      "learning_rate": 0.00017025772716520323,
      "loss": 1.1956,
      "step": 90
    },
    {
      "epoch": 0.35,
      "learning_rate": 0.00016557376971897266,
      "loss": 1.1867,
      "step": 95
    },
    {
      "epoch": 0.36,
      "learning_rate": 0.0001606225410966638,
      "loss": 1.182,
      "step": 100
    },
    {
      "epoch": 0.38,
      "learning_rate": 0.0001554242219391425,
      "loss": 1.1625,
      "step": 105
    },
    {
      "epoch": 0.4,
      "learning_rate": 0.00015000000000000001,
      "loss": 1.1626,
      "step": 110
    },
    {
      "epoch": 0.42,
      "learning_rate": 0.00014437198378669598,
      "loss": 1.1716,
      "step": 115
    },
    {
      "epoch": 0.44,
      "learning_rate": 0.0001385631124488136,
      "loss": 1.1372,
      "step": 120
    },
    {
      "epoch": 0.46,
      "learning_rate": 0.00013259706228071285,
      "loss": 1.161,
      "step": 125
    },
    {
      "epoch": 0.47,
      "learning_rate": 0.0001264981502196662,
      "loss": 1.1578,
      "step": 130
    },
    {
      "epoch": 0.49,
      "learning_rate": 0.00012029123473280668,
      "loss": 1.1553,
      "step": 135
    },
    {
      "epoch": 0.51,
      "learning_rate": 0.00011400161449686293,
      "loss": 1.1518,
      "step": 140
    },
    {
      "epoch": 0.53,
      "learning_rate": 0.0001076549252836496,
      "loss": 1.15,
      "step": 145
    },
    {
      "epoch": 0.55,
      "learning_rate": 0.00010127703547159739,
      "loss": 1.1362,
      "step": 150
    },
    {
      "epoch": 0.56,
      "learning_rate": 9.489394060920496e-05,
      "loss": 1.1724,
      "step": 155
    },
    {
      "epoch": 0.58,
      "learning_rate": 8.853165746015997e-05,
      "loss": 1.1116,
      "step": 160
    },
    {
      "epoch": 0.6,
      "learning_rate": 8.221611796198985e-05,
      "loss": 1.1193,
      "step": 165
    },
    {
      "epoch": 0.62,
      "learning_rate": 7.597306353045393e-05,
      "loss": 1.132,
      "step": 170
    },
    {
      "epoch": 0.64,
      "learning_rate": 6.982794014048077e-05,
      "loss": 1.1159,
      "step": 175
    },
    {
      "epoch": 0.66,
      "learning_rate": 6.380579461128819e-05,
      "loss": 1.1067,
      "step": 180
    },
    {
      "epoch": 0.67,
      "learning_rate": 5.793117251841659e-05,
      "loss": 1.1426,
      "step": 185
    },
    {
      "epoch": 0.69,
      "learning_rate": 5.222801814877369e-05,
      "loss": 1.1398,
      "step": 190
    },
    {
      "epoch": 0.71,
      "learning_rate": 4.671957690646345e-05,
      "loss": 1.1299,
      "step": 195
    },
    {
      "epoch": 0.73,
      "learning_rate": 4.142830056718052e-05,
      "loss": 1.1109,
      "step": 200
    },
    {
      "epoch": 0.75,
      "learning_rate": 3.637575576734404e-05,
      "loss": 1.0975,
      "step": 205
    },
    {
      "epoch": 0.77,
      "learning_rate": 3.158253610095697e-05,
      "loss": 1.1381,
      "step": 210
    },
    {
      "epoch": 0.78,
      "learning_rate": 2.706817818247551e-05,
      "loss": 1.1048,
      "step": 215
    },
    {
      "epoch": 0.8,
      "learning_rate": 2.2851082017805703e-05,
      "loss": 1.145,
      "step": 220
    },
    {
      "epoch": 0.82,
      "learning_rate": 1.8948436007986546e-05,
      "loss": 1.104,
      "step": 225
    },
    {
      "epoch": 0.84,
      "learning_rate": 1.5376146891235598e-05,
      "loss": 1.0886,
      "step": 230
    },
    {
      "epoch": 0.86,
      "learning_rate": 1.214877490890578e-05,
      "loss": 1.1076,
      "step": 235
    },
    {
      "epoch": 0.87,
      "learning_rate": 9.279474459608805e-06,
      "loss": 1.127,
      "step": 240
    },
    {
      "epoch": 0.89,
      "learning_rate": 6.779940483393032e-06,
      "loss": 1.1085,
      "step": 245
    },
    {
      "epoch": 0.91,
      "learning_rate": 4.660360794506946e-06,
      "loss": 1.1209,
      "step": 250
    },
    {
      "epoch": 0.93,
      "learning_rate": 2.929374557035036e-06,
      "loss": 1.0841,
      "step": 255
    },
    {
      "epoch": 0.95,
      "learning_rate": 1.5940370726542863e-06,
      "loss": 1.0937,
      "step": 260
    },
    {
      "epoch": 0.97,
      "learning_rate": 6.597910240324967e-07,
      "loss": 1.1317,
      "step": 265
    },
    {
      "epoch": 0.98,
      "learning_rate": 1.3044429107700318e-07,
      "loss": 1.1007,
      "step": 270
    },
    {
      "epoch": 1.0,
      "eval_loss": 1.2122626304626465,
      "eval_runtime": 236.2559,
      "eval_samples_per_second": 2.671,
      "eval_steps_per_second": 0.669,
      "step": 274
    },
    {
      "epoch": 1.0,
      "step": 274,
      "total_flos": 9.632874487611392e+16,
      "train_loss": 1.1792847770844064,
      "train_runtime": 1678.8973,
      "train_samples_per_second": 0.653,
      "train_steps_per_second": 0.163
    }
  ],
  "logging_steps": 5,
  "max_steps": 274,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 100,
  "total_flos": 9.632874487611392e+16,
  "train_batch_size": 2,
  "trial_name": null,
  "trial_params": null
}