{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 10.0,
  "global_step": 155200,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.03,
      "learning_rate": 4.98389175257732e-05,
      "loss": 2.2272,
      "step": 500
    },
    {
      "epoch": 0.06,
      "learning_rate": 4.9677835051546396e-05,
      "loss": 2.1881,
      "step": 1000
    },
    {
      "epoch": 0.1,
      "learning_rate": 4.951675257731959e-05,
      "loss": 2.1808,
      "step": 1500
    },
    {
      "epoch": 0.13,
      "learning_rate": 4.935567010309279e-05,
      "loss": 2.1891,
      "step": 2000
    },
    {
      "epoch": 0.16,
      "learning_rate": 4.919458762886598e-05,
      "loss": 2.2239,
      "step": 2500
    },
    {
      "epoch": 0.19,
      "learning_rate": 4.903350515463918e-05,
      "loss": 2.1983,
      "step": 3000
    },
    {
      "epoch": 0.23,
      "learning_rate": 4.8872422680412375e-05,
      "loss": 2.2063,
      "step": 3500
    },
    {
      "epoch": 0.26,
      "learning_rate": 4.871134020618557e-05,
      "loss": 2.2125,
      "step": 4000
    },
    {
      "epoch": 0.29,
      "learning_rate": 4.855025773195876e-05,
      "loss": 2.1918,
      "step": 4500
    },
    {
      "epoch": 0.32,
      "learning_rate": 4.838917525773196e-05,
      "loss": 2.2064,
      "step": 5000
    },
    {
      "epoch": 0.35,
      "learning_rate": 4.8228092783505156e-05,
      "loss": 2.1789,
      "step": 5500
    },
    {
      "epoch": 0.39,
      "learning_rate": 4.806701030927835e-05,
      "loss": 2.2123,
      "step": 6000
    },
    {
      "epoch": 0.42,
      "learning_rate": 4.790592783505155e-05,
      "loss": 2.1775,
      "step": 6500
    },
    {
      "epoch": 0.45,
      "learning_rate": 4.774484536082475e-05,
      "loss": 2.1837,
      "step": 7000
    },
    {
      "epoch": 0.48,
      "learning_rate": 4.758376288659794e-05,
      "loss": 2.1812,
      "step": 7500
    },
    {
      "epoch": 0.52,
      "learning_rate": 4.7422680412371134e-05,
      "loss": 2.1857,
      "step": 8000
    },
    {
      "epoch": 0.55,
      "learning_rate": 4.726159793814433e-05,
      "loss": 2.1788,
      "step": 8500
    },
    {
      "epoch": 0.58,
      "learning_rate": 4.710051546391753e-05,
      "loss": 2.2204,
      "step": 9000
    },
    {
      "epoch": 0.61,
      "learning_rate": 4.6939432989690725e-05,
      "loss": 2.1704,
      "step": 9500
    },
    {
      "epoch": 0.64,
      "learning_rate": 4.677835051546392e-05,
      "loss": 2.2155,
      "step": 10000
    },
    {
      "epoch": 0.68,
      "learning_rate": 4.661726804123712e-05,
      "loss": 2.2153,
      "step": 10500
    },
    {
      "epoch": 0.71,
      "learning_rate": 4.6456185567010316e-05,
      "loss": 2.2051,
      "step": 11000
    },
    {
      "epoch": 0.74,
      "learning_rate": 4.6295103092783506e-05,
      "loss": 2.2273,
      "step": 11500
    },
    {
      "epoch": 0.77,
      "learning_rate": 4.61340206185567e-05,
      "loss": 2.1818,
      "step": 12000
    },
    {
      "epoch": 0.81,
      "learning_rate": 4.59729381443299e-05,
      "loss": 2.1954,
      "step": 12500
    },
    {
      "epoch": 0.84,
      "learning_rate": 4.581185567010309e-05,
      "loss": 2.1635,
      "step": 13000
    },
    {
      "epoch": 0.87,
      "learning_rate": 4.565077319587629e-05,
      "loss": 2.1799,
      "step": 13500
    },
    {
      "epoch": 0.9,
      "learning_rate": 4.5489690721649484e-05,
      "loss": 2.1742,
      "step": 14000
    },
    {
      "epoch": 0.93,
      "learning_rate": 4.532860824742268e-05,
      "loss": 2.1402,
      "step": 14500
    },
    {
      "epoch": 0.97,
      "learning_rate": 4.516752577319588e-05,
      "loss": 2.1322,
      "step": 15000
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.5006443298969075e-05,
      "loss": 2.1972,
      "step": 15500
    },
    {
      "epoch": 1.03,
      "learning_rate": 4.484536082474227e-05,
      "loss": 1.8657,
      "step": 16000
    },
    {
      "epoch": 1.06,
      "learning_rate": 4.468427835051547e-05,
      "loss": 1.8975,
      "step": 16500
    },
    {
      "epoch": 1.1,
      "learning_rate": 4.452319587628866e-05,
      "loss": 1.8662,
      "step": 17000
    },
    {
      "epoch": 1.13,
      "learning_rate": 4.4362113402061856e-05,
      "loss": 1.8394,
      "step": 17500
    },
    {
      "epoch": 1.16,
      "learning_rate": 4.4201030927835053e-05,
      "loss": 1.8698,
      "step": 18000
    },
    {
      "epoch": 1.19,
      "learning_rate": 4.403994845360825e-05,
      "loss": 1.8718,
      "step": 18500
    },
    {
      "epoch": 1.22,
      "learning_rate": 4.387886597938145e-05,
      "loss": 1.8674,
      "step": 19000
    },
    {
      "epoch": 1.26,
      "learning_rate": 4.3717783505154644e-05,
      "loss": 1.9007,
      "step": 19500
    },
    {
      "epoch": 1.29,
      "learning_rate": 4.3556701030927835e-05,
      "loss": 1.8961,
      "step": 20000
    },
    {
      "epoch": 1.32,
      "learning_rate": 4.339561855670103e-05,
      "loss": 1.8516,
      "step": 20500
    },
    {
      "epoch": 1.35,
      "learning_rate": 4.323453608247423e-05,
      "loss": 1.9022,
      "step": 21000
    },
    {
      "epoch": 1.39,
      "learning_rate": 4.3073453608247426e-05,
      "loss": 1.8913,
      "step": 21500
    },
    {
      "epoch": 1.42,
      "learning_rate": 4.2912371134020616e-05,
      "loss": 1.9092,
      "step": 22000
    },
    {
      "epoch": 1.45,
      "learning_rate": 4.275128865979381e-05,
      "loss": 1.8871,
      "step": 22500
    },
    {
      "epoch": 1.48,
      "learning_rate": 4.259020618556701e-05,
      "loss": 1.9027,
      "step": 23000
    },
    {
      "epoch": 1.51,
      "learning_rate": 4.242912371134021e-05,
      "loss": 1.9055,
      "step": 23500
    },
    {
      "epoch": 1.55,
      "learning_rate": 4.2268041237113404e-05,
      "loss": 1.8874,
      "step": 24000
    },
    {
      "epoch": 1.58,
      "learning_rate": 4.21069587628866e-05,
      "loss": 1.9049,
      "step": 24500
    },
    {
      "epoch": 1.61,
      "learning_rate": 4.19458762886598e-05,
      "loss": 1.9106,
      "step": 25000
    },
    {
      "epoch": 1.64,
      "learning_rate": 4.1784793814432995e-05,
      "loss": 1.9016,
      "step": 25500
    },
    {
      "epoch": 1.68,
      "learning_rate": 4.162371134020619e-05,
      "loss": 1.9368,
      "step": 26000
    },
    {
      "epoch": 1.71,
      "learning_rate": 4.146262886597939e-05,
      "loss": 1.9027,
      "step": 26500
    },
    {
      "epoch": 1.74,
      "learning_rate": 4.130154639175258e-05,
      "loss": 1.9143,
      "step": 27000
    },
    {
      "epoch": 1.77,
      "learning_rate": 4.1140463917525776e-05,
      "loss": 1.9305,
      "step": 27500
    },
    {
      "epoch": 1.8,
      "learning_rate": 4.097938144329897e-05,
      "loss": 1.9137,
      "step": 28000
    },
    {
      "epoch": 1.84,
      "learning_rate": 4.081829896907216e-05,
      "loss": 1.9027,
      "step": 28500
    },
    {
      "epoch": 1.87,
      "learning_rate": 4.065721649484536e-05,
      "loss": 1.9199,
      "step": 29000
    },
    {
      "epoch": 1.9,
      "learning_rate": 4.049613402061856e-05,
      "loss": 1.935,
      "step": 29500
    },
    {
      "epoch": 1.93,
      "learning_rate": 4.0335051546391754e-05,
      "loss": 1.9394,
      "step": 30000
    },
    {
      "epoch": 1.97,
      "learning_rate": 4.017396907216495e-05,
      "loss": 1.9376,
      "step": 30500
    },
    {
      "epoch": 2.0,
      "learning_rate": 4.001288659793815e-05,
      "loss": 1.8943,
      "step": 31000
    },
    {
      "epoch": 2.03,
      "learning_rate": 3.9851804123711345e-05,
      "loss": 1.6319,
      "step": 31500
    },
    {
      "epoch": 2.06,
      "learning_rate": 3.9690721649484535e-05,
      "loss": 1.5991,
      "step": 32000
    },
    {
      "epoch": 2.09,
      "learning_rate": 3.952963917525773e-05,
      "loss": 1.6505,
      "step": 32500
    },
    {
      "epoch": 2.13,
      "learning_rate": 3.936855670103093e-05,
      "loss": 1.6288,
      "step": 33000
    },
    {
      "epoch": 2.16,
      "learning_rate": 3.9207474226804126e-05,
      "loss": 1.6686,
      "step": 33500
    },
    {
      "epoch": 2.19,
      "learning_rate": 3.904639175257732e-05,
      "loss": 1.6538,
      "step": 34000
    },
    {
      "epoch": 2.22,
      "learning_rate": 3.888530927835052e-05,
      "loss": 1.6228,
      "step": 34500
    },
    {
      "epoch": 2.26,
      "learning_rate": 3.872422680412372e-05,
      "loss": 1.6465,
      "step": 35000
    },
    {
      "epoch": 2.29,
      "learning_rate": 3.8563144329896914e-05,
      "loss": 1.67,
      "step": 35500
    },
    {
      "epoch": 2.32,
      "learning_rate": 3.8402061855670104e-05,
      "loss": 1.6598,
      "step": 36000
    },
    {
      "epoch": 2.35,
      "learning_rate": 3.82409793814433e-05,
      "loss": 1.6909,
      "step": 36500
    },
    {
      "epoch": 2.38,
      "learning_rate": 3.807989690721649e-05,
      "loss": 1.6686,
      "step": 37000
    },
    {
      "epoch": 2.42,
      "learning_rate": 3.791881443298969e-05,
      "loss": 1.6587,
      "step": 37500
    },
    {
      "epoch": 2.45,
      "learning_rate": 3.7757731958762886e-05,
      "loss": 1.6826,
      "step": 38000
    },
    {
      "epoch": 2.48,
      "learning_rate": 3.759664948453608e-05,
      "loss": 1.6711,
      "step": 38500
    },
    {
      "epoch": 2.51,
      "learning_rate": 3.743556701030928e-05,
      "loss": 1.6633,
      "step": 39000
    },
    {
      "epoch": 2.55,
      "learning_rate": 3.7274484536082477e-05,
      "loss": 1.6806,
      "step": 39500
    },
    {
      "epoch": 2.58,
      "learning_rate": 3.7113402061855674e-05,
      "loss": 1.6888,
      "step": 40000
    },
    {
      "epoch": 2.61,
      "learning_rate": 3.695231958762887e-05,
      "loss": 1.6788,
      "step": 40500
    },
    {
      "epoch": 2.64,
      "learning_rate": 3.679123711340207e-05,
      "loss": 1.6988,
      "step": 41000
    },
    {
      "epoch": 2.67,
      "learning_rate": 3.663015463917526e-05,
      "loss": 1.6428,
      "step": 41500
    },
    {
      "epoch": 2.71,
      "learning_rate": 3.6469072164948455e-05,
      "loss": 1.6797,
      "step": 42000
    },
    {
      "epoch": 2.74,
      "learning_rate": 3.630798969072165e-05,
      "loss": 1.6623,
      "step": 42500
    },
    {
      "epoch": 2.77,
      "learning_rate": 3.614690721649485e-05,
      "loss": 1.6899,
      "step": 43000
    },
    {
      "epoch": 2.8,
      "learning_rate": 3.5985824742268046e-05,
      "loss": 1.706,
      "step": 43500
    },
    {
      "epoch": 2.84,
      "learning_rate": 3.5824742268041236e-05,
      "loss": 1.6955,
      "step": 44000
    },
    {
      "epoch": 2.87,
      "learning_rate": 3.566365979381443e-05,
      "loss": 1.7074,
      "step": 44500
    },
    {
      "epoch": 2.9,
      "learning_rate": 3.550257731958763e-05,
      "loss": 1.7156,
      "step": 45000
    },
    {
      "epoch": 2.93,
      "learning_rate": 3.534149484536083e-05,
      "loss": 1.6655,
      "step": 45500
    },
    {
      "epoch": 2.96,
      "learning_rate": 3.5180412371134024e-05,
      "loss": 1.6848,
      "step": 46000
    },
    {
      "epoch": 3.0,
      "learning_rate": 3.5019329896907214e-05,
      "loss": 1.6926,
      "step": 46500
    },
    {
      "epoch": 3.03,
      "learning_rate": 3.485824742268041e-05,
      "loss": 1.4406,
      "step": 47000
    },
    {
      "epoch": 3.06,
      "learning_rate": 3.469716494845361e-05,
      "loss": 1.4185,
      "step": 47500
    },
    {
      "epoch": 3.09,
      "learning_rate": 3.4536082474226805e-05,
      "loss": 1.4487,
      "step": 48000
    },
    {
      "epoch": 3.12,
      "learning_rate": 3.4375e-05,
      "loss": 1.4217,
      "step": 48500
    },
    {
      "epoch": 3.16,
      "learning_rate": 3.42139175257732e-05,
      "loss": 1.469,
      "step": 49000
    },
    {
      "epoch": 3.19,
      "learning_rate": 3.4052835051546396e-05,
      "loss": 1.4631,
      "step": 49500
    },
    {
      "epoch": 3.22,
      "learning_rate": 3.389175257731959e-05,
      "loss": 1.4367,
      "step": 50000
    },
    {
      "epoch": 3.25,
      "learning_rate": 3.373067010309279e-05,
      "loss": 1.4733,
      "step": 50500
    },
    {
      "epoch": 3.29,
      "learning_rate": 3.356958762886598e-05,
      "loss": 1.455,
      "step": 51000
    },
    {
      "epoch": 3.32,
      "learning_rate": 3.340850515463918e-05,
      "loss": 1.4458,
      "step": 51500
    },
    {
      "epoch": 3.35,
      "learning_rate": 3.3247422680412374e-05,
      "loss": 1.4681,
      "step": 52000
    },
    {
      "epoch": 3.38,
      "learning_rate": 3.3086340206185564e-05,
      "loss": 1.4796,
      "step": 52500
    },
    {
      "epoch": 3.41,
      "learning_rate": 3.292525773195876e-05,
      "loss": 1.4969,
      "step": 53000
    },
    {
      "epoch": 3.45,
      "learning_rate": 3.276417525773196e-05,
      "loss": 1.4861,
      "step": 53500
    },
    {
      "epoch": 3.48,
      "learning_rate": 3.2603092783505155e-05,
      "loss": 1.5032,
      "step": 54000
    },
    {
      "epoch": 3.51,
      "learning_rate": 3.244201030927835e-05,
      "loss": 1.463,
      "step": 54500
    },
    {
      "epoch": 3.54,
      "learning_rate": 3.228092783505155e-05,
      "loss": 1.4772,
      "step": 55000
    },
    {
      "epoch": 3.58,
      "learning_rate": 3.2119845360824746e-05,
      "loss": 1.4969,
      "step": 55500
    },
    {
      "epoch": 3.61,
      "learning_rate": 3.1958762886597937e-05,
      "loss": 1.5088,
      "step": 56000
    },
    {
      "epoch": 3.64,
      "learning_rate": 3.1797680412371134e-05,
      "loss": 1.487,
      "step": 56500
    },
    {
      "epoch": 3.67,
      "learning_rate": 3.163659793814433e-05,
      "loss": 1.477,
      "step": 57000
    },
    {
      "epoch": 3.7,
      "learning_rate": 3.147551546391753e-05,
      "loss": 1.486,
      "step": 57500
    },
    {
      "epoch": 3.74,
      "learning_rate": 3.1314432989690725e-05,
      "loss": 1.4838,
      "step": 58000
    },
    {
      "epoch": 3.77,
      "learning_rate": 3.115335051546392e-05,
      "loss": 1.4992,
      "step": 58500
    },
    {
      "epoch": 3.8,
      "learning_rate": 3.099226804123712e-05,
      "loss": 1.4949,
      "step": 59000
    },
    {
      "epoch": 3.83,
      "learning_rate": 3.0831185567010316e-05,
      "loss": 1.5133,
      "step": 59500
    },
    {
      "epoch": 3.87,
      "learning_rate": 3.0670103092783506e-05,
      "loss": 1.5086,
      "step": 60000
    },
    {
      "epoch": 3.9,
      "learning_rate": 3.0509020618556706e-05,
      "loss": 1.4887,
      "step": 60500
    },
    {
      "epoch": 3.93,
      "learning_rate": 3.0347938144329896e-05,
      "loss": 1.5304,
      "step": 61000
    },
    {
      "epoch": 3.96,
      "learning_rate": 3.0186855670103093e-05,
      "loss": 1.5248,
      "step": 61500
    },
    {
      "epoch": 3.99,
      "learning_rate": 3.002577319587629e-05,
      "loss": 1.4788,
      "step": 62000
    },
    {
      "epoch": 4.03,
      "learning_rate": 2.9864690721649484e-05,
      "loss": 1.3236,
      "step": 62500
    },
    {
      "epoch": 4.06,
      "learning_rate": 2.970360824742268e-05,
      "loss": 1.242,
      "step": 63000
    },
    {
      "epoch": 4.09,
      "learning_rate": 2.9542525773195878e-05,
      "loss": 1.2471,
      "step": 63500
    },
    {
      "epoch": 4.12,
      "learning_rate": 2.9381443298969075e-05,
      "loss": 1.2918,
      "step": 64000
    },
    {
      "epoch": 4.16,
      "learning_rate": 2.9220360824742272e-05,
      "loss": 1.2944,
      "step": 64500
    },
    {
      "epoch": 4.19,
      "learning_rate": 2.905927835051547e-05,
      "loss": 1.2813,
      "step": 65000
    },
    {
      "epoch": 4.22,
      "learning_rate": 2.889819587628866e-05,
      "loss": 1.2879,
      "step": 65500
    },
    {
      "epoch": 4.25,
      "learning_rate": 2.8737113402061856e-05,
      "loss": 1.28,
      "step": 66000
    },
    {
      "epoch": 4.28,
      "learning_rate": 2.857603092783505e-05,
      "loss": 1.2657,
      "step": 66500
    },
    {
      "epoch": 4.32,
      "learning_rate": 2.8414948453608247e-05,
      "loss": 1.3145,
      "step": 67000
    },
    {
      "epoch": 4.35,
      "learning_rate": 2.8253865979381444e-05,
      "loss": 1.3195,
      "step": 67500
    },
    {
      "epoch": 4.38,
      "learning_rate": 2.809278350515464e-05,
      "loss": 1.2844,
      "step": 68000
    },
    {
      "epoch": 4.41,
      "learning_rate": 2.7931701030927838e-05,
      "loss": 1.3339,
      "step": 68500
    },
    {
      "epoch": 4.45,
      "learning_rate": 2.7770618556701035e-05,
      "loss": 1.3075,
      "step": 69000
    },
    {
      "epoch": 4.48,
      "learning_rate": 2.760953608247423e-05,
      "loss": 1.314,
      "step": 69500
    },
    {
      "epoch": 4.51,
      "learning_rate": 2.7448453608247425e-05,
      "loss": 1.308,
      "step": 70000
    },
    {
      "epoch": 4.54,
      "learning_rate": 2.728737113402062e-05,
      "loss": 1.3234,
      "step": 70500
    },
    {
      "epoch": 4.57,
      "learning_rate": 2.7126288659793812e-05,
      "loss": 1.3307,
      "step": 71000
    },
    {
      "epoch": 4.61,
      "learning_rate": 2.696520618556701e-05,
      "loss": 1.3426,
      "step": 71500
    },
    {
      "epoch": 4.64,
      "learning_rate": 2.6804123711340206e-05,
      "loss": 1.3332,
      "step": 72000
    },
    {
      "epoch": 4.67,
      "learning_rate": 2.6643041237113403e-05,
      "loss": 1.3369,
      "step": 72500
    },
    {
      "epoch": 4.7,
      "learning_rate": 2.64819587628866e-05,
      "loss": 1.3446,
      "step": 73000
    },
    {
      "epoch": 4.74,
      "learning_rate": 2.6320876288659797e-05,
      "loss": 1.3266,
      "step": 73500
    },
    {
      "epoch": 4.77,
      "learning_rate": 2.615979381443299e-05,
      "loss": 1.3303,
      "step": 74000
    },
    {
      "epoch": 4.8,
      "learning_rate": 2.5998711340206188e-05,
      "loss": 1.332,
      "step": 74500
    },
    {
      "epoch": 4.83,
      "learning_rate": 2.5837628865979385e-05,
      "loss": 1.3235,
      "step": 75000
    },
    {
      "epoch": 4.86,
      "learning_rate": 2.5676546391752575e-05,
      "loss": 1.3405,
      "step": 75500
    },
    {
      "epoch": 4.9,
      "learning_rate": 2.5515463917525772e-05,
      "loss": 1.3568,
      "step": 76000
    },
    {
      "epoch": 4.93,
      "learning_rate": 2.535438144329897e-05,
      "loss": 1.3569,
      "step": 76500
    },
    {
      "epoch": 4.96,
      "learning_rate": 2.5193298969072166e-05,
      "loss": 1.3393,
      "step": 77000
    },
    {
      "epoch": 4.99,
      "learning_rate": 2.5032216494845363e-05,
      "loss": 1.3391,
      "step": 77500
    },
    {
      "epoch": 5.03,
      "learning_rate": 2.4871134020618557e-05,
      "loss": 1.1727,
      "step": 78000
    },
    {
      "epoch": 5.06,
      "learning_rate": 2.4710051546391754e-05,
      "loss": 1.1299,
      "step": 78500
    },
    {
      "epoch": 5.09,
      "learning_rate": 2.4548969072164947e-05,
      "loss": 1.1557,
      "step": 79000
    },
    {
      "epoch": 5.12,
      "learning_rate": 2.4387886597938144e-05,
      "loss": 1.1172,
      "step": 79500
    },
    {
      "epoch": 5.15,
      "learning_rate": 2.422680412371134e-05,
      "loss": 1.1455,
      "step": 80000
    },
    {
      "epoch": 5.19,
      "learning_rate": 2.406572164948454e-05,
      "loss": 1.1802,
      "step": 80500
    },
    {
      "epoch": 5.22,
      "learning_rate": 2.3904639175257735e-05,
      "loss": 1.156,
      "step": 81000
    },
    {
      "epoch": 5.25,
      "learning_rate": 2.374355670103093e-05,
      "loss": 1.1724,
      "step": 81500
    },
    {
      "epoch": 5.28,
      "learning_rate": 2.3582474226804126e-05,
      "loss": 1.1663,
      "step": 82000
    },
    {
      "epoch": 5.32,
      "learning_rate": 2.342139175257732e-05,
      "loss": 1.1727,
      "step": 82500
    },
    {
      "epoch": 5.35,
      "learning_rate": 2.3260309278350517e-05,
      "loss": 1.1767,
      "step": 83000
    },
    {
      "epoch": 5.38,
      "learning_rate": 2.3099226804123713e-05,
      "loss": 1.1817,
      "step": 83500
    },
    {
      "epoch": 5.41,
      "learning_rate": 2.2938144329896907e-05,
      "loss": 1.1878,
      "step": 84000
    },
    {
      "epoch": 5.44,
      "learning_rate": 2.2777061855670104e-05,
      "loss": 1.1664,
      "step": 84500
    },
    {
      "epoch": 5.48,
      "learning_rate": 2.26159793814433e-05,
      "loss": 1.1513,
      "step": 85000
    },
    {
      "epoch": 5.51,
      "learning_rate": 2.2454896907216498e-05,
      "loss": 1.1762,
      "step": 85500
    },
    {
      "epoch": 5.54,
      "learning_rate": 2.229381443298969e-05,
      "loss": 1.1857,
      "step": 86000
    },
    {
      "epoch": 5.57,
      "learning_rate": 2.2132731958762885e-05,
      "loss": 1.1986,
      "step": 86500
    },
    {
      "epoch": 5.61,
      "learning_rate": 2.1971649484536082e-05,
      "loss": 1.1738,
      "step": 87000
    },
    {
      "epoch": 5.64,
      "learning_rate": 2.181056701030928e-05,
      "loss": 1.2002,
      "step": 87500
    },
    {
      "epoch": 5.67,
      "learning_rate": 2.1649484536082476e-05,
      "loss": 1.1515,
      "step": 88000
    },
    {
      "epoch": 5.7,
      "learning_rate": 2.1488402061855673e-05,
      "loss": 1.2072,
      "step": 88500
    },
    {
      "epoch": 5.73,
      "learning_rate": 2.1327319587628867e-05,
      "loss": 1.1773,
      "step": 89000
    },
    {
      "epoch": 5.77,
      "learning_rate": 2.1166237113402064e-05,
      "loss": 1.1906,
      "step": 89500
    },
    {
      "epoch": 5.8,
      "learning_rate": 2.1005154639175257e-05,
      "loss": 1.2048,
      "step": 90000
    },
    {
      "epoch": 5.83,
      "learning_rate": 2.0844072164948454e-05,
      "loss": 1.2176,
      "step": 90500
    },
    {
      "epoch": 5.86,
      "learning_rate": 2.0682989690721648e-05,
      "loss": 1.1869,
      "step": 91000
    },
    {
      "epoch": 5.9,
      "learning_rate": 2.0521907216494845e-05,
      "loss": 1.1964,
      "step": 91500
    },
    {
      "epoch": 5.93,
      "learning_rate": 2.0360824742268042e-05,
      "loss": 1.176,
      "step": 92000
    },
    {
      "epoch": 5.96,
      "learning_rate": 2.019974226804124e-05,
      "loss": 1.2437,
      "step": 92500
    },
    {
      "epoch": 5.99,
      "learning_rate": 2.0038659793814436e-05,
      "loss": 1.2032,
      "step": 93000
    },
    {
      "epoch": 6.02,
      "learning_rate": 1.987757731958763e-05,
      "loss": 1.0818,
      "step": 93500
    },
    {
      "epoch": 6.06,
      "learning_rate": 1.9716494845360827e-05,
      "loss": 1.0236,
      "step": 94000
    },
    {
      "epoch": 6.09,
      "learning_rate": 1.955541237113402e-05,
      "loss": 1.0517,
      "step": 94500
    },
    {
      "epoch": 6.12,
      "learning_rate": 1.9394329896907217e-05,
      "loss": 1.0446,
      "step": 95000
    },
    {
      "epoch": 6.15,
      "learning_rate": 1.9233247422680414e-05,
      "loss": 1.0508,
      "step": 95500
    },
    {
      "epoch": 6.19,
      "learning_rate": 1.9072164948453608e-05,
      "loss": 1.0296,
      "step": 96000
    },
    {
      "epoch": 6.22,
      "learning_rate": 1.8911082474226805e-05,
      "loss": 1.0353,
      "step": 96500
    },
    {
      "epoch": 6.25,
      "learning_rate": 1.8750000000000002e-05,
      "loss": 1.0498,
      "step": 97000
    },
    {
      "epoch": 6.28,
      "learning_rate": 1.85889175257732e-05,
      "loss": 1.0701,
      "step": 97500
    },
    {
      "epoch": 6.31,
      "learning_rate": 1.8427835051546392e-05,
      "loss": 1.0654,
      "step": 98000
    },
    {
      "epoch": 6.35,
      "learning_rate": 1.8266752577319586e-05,
      "loss": 1.0504,
      "step": 98500
    },
    {
      "epoch": 6.38,
      "learning_rate": 1.8105670103092783e-05,
      "loss": 1.0527,
      "step": 99000
    },
    {
      "epoch": 6.41,
      "learning_rate": 1.794458762886598e-05,
      "loss": 1.0575,
      "step": 99500
    },
    {
      "epoch": 6.44,
      "learning_rate": 1.7783505154639177e-05,
      "loss": 1.0683,
      "step": 100000
    },
    {
      "epoch": 6.48,
      "learning_rate": 1.7622422680412374e-05,
      "loss": 1.0589,
      "step": 100500
    },
    {
      "epoch": 6.51,
      "learning_rate": 1.7461340206185568e-05,
      "loss": 1.0853,
      "step": 101000
    },
    {
      "epoch": 6.54,
      "learning_rate": 1.7300257731958764e-05,
      "loss": 1.0632,
      "step": 101500
    },
    {
      "epoch": 6.57,
      "learning_rate": 1.7139175257731958e-05,
      "loss": 1.0379,
      "step": 102000
    },
    {
      "epoch": 6.6,
      "learning_rate": 1.6978092783505155e-05,
      "loss": 1.052,
      "step": 102500
    },
    {
      "epoch": 6.64,
      "learning_rate": 1.6817010309278352e-05,
      "loss": 1.0583,
      "step": 103000
    },
    {
      "epoch": 6.67,
      "learning_rate": 1.6655927835051546e-05,
      "loss": 1.1123,
      "step": 103500
    },
    {
      "epoch": 6.7,
      "learning_rate": 1.6494845360824743e-05,
      "loss": 1.0807,
      "step": 104000
    },
    {
      "epoch": 6.73,
      "learning_rate": 1.633376288659794e-05,
      "loss": 1.0447,
      "step": 104500
    },
    {
      "epoch": 6.77,
      "learning_rate": 1.6172680412371137e-05,
      "loss": 1.0392,
      "step": 105000
    },
    {
      "epoch": 6.8,
      "learning_rate": 1.601159793814433e-05,
      "loss": 1.0944,
      "step": 105500
    },
    {
      "epoch": 6.83,
      "learning_rate": 1.5850515463917527e-05,
      "loss": 1.0617,
      "step": 106000
    },
    {
      "epoch": 6.86,
      "learning_rate": 1.568943298969072e-05,
      "loss": 1.0552,
      "step": 106500
    },
    {
      "epoch": 6.89,
      "learning_rate": 1.5528350515463918e-05,
      "loss": 1.1154,
      "step": 107000
    },
    {
      "epoch": 6.93,
      "learning_rate": 1.5367268041237115e-05,
      "loss": 1.0879,
      "step": 107500
    },
    {
      "epoch": 6.96,
      "learning_rate": 1.5206185567010308e-05,
      "loss": 1.0709,
      "step": 108000
    },
    {
      "epoch": 6.99,
      "learning_rate": 1.5045103092783505e-05,
      "loss": 1.0835,
      "step": 108500
    },
    {
      "epoch": 7.02,
      "learning_rate": 1.4884020618556702e-05,
      "loss": 0.974,
      "step": 109000
    },
    {
      "epoch": 7.06,
      "learning_rate": 1.4722938144329898e-05,
      "loss": 0.9417,
      "step": 109500
    },
    {
      "epoch": 7.09,
      "learning_rate": 1.4561855670103095e-05,
      "loss": 0.9393,
      "step": 110000
    },
    {
      "epoch": 7.12,
      "learning_rate": 1.4400773195876288e-05,
      "loss": 0.9574,
      "step": 110500
    },
    {
      "epoch": 7.15,
      "learning_rate": 1.4239690721649485e-05,
      "loss": 0.9629,
      "step": 111000
    },
    {
      "epoch": 7.18,
      "learning_rate": 1.407860824742268e-05,
      "loss": 0.97,
      "step": 111500
    },
    {
      "epoch": 7.22,
      "learning_rate": 1.3917525773195878e-05,
      "loss": 0.9266,
      "step": 112000
    },
    {
      "epoch": 7.25,
      "learning_rate": 1.3756443298969075e-05,
      "loss": 0.9694,
      "step": 112500
    },
    {
      "epoch": 7.28,
      "learning_rate": 1.3595360824742268e-05,
      "loss": 0.9429,
      "step": 113000
    },
    {
      "epoch": 7.31,
      "learning_rate": 1.3434278350515463e-05,
      "loss": 0.9373,
      "step": 113500
    },
    {
      "epoch": 7.35,
      "learning_rate": 1.327319587628866e-05,
      "loss": 0.9613,
      "step": 114000
    },
    {
      "epoch": 7.38,
      "learning_rate": 1.3112113402061857e-05,
      "loss": 0.9507,
      "step": 114500
    },
    {
      "epoch": 7.41,
      "learning_rate": 1.2951030927835053e-05,
      "loss": 0.9546,
      "step": 115000
    },
    {
      "epoch": 7.44,
      "learning_rate": 1.2789948453608246e-05,
      "loss": 0.9737,
      "step": 115500
    },
    {
      "epoch": 7.47,
      "learning_rate": 1.2628865979381443e-05,
      "loss": 0.9765,
      "step": 116000
    },
    {
      "epoch": 7.51,
      "learning_rate": 1.246778350515464e-05,
      "loss": 0.9772,
      "step": 116500
    },
    {
      "epoch": 7.54,
      "learning_rate": 1.2306701030927836e-05,
      "loss": 0.9745,
      "step": 117000
    },
    {
      "epoch": 7.57,
      "learning_rate": 1.2145618556701031e-05,
      "loss": 0.9693,
      "step": 117500
    },
    {
      "epoch": 7.6,
      "learning_rate": 1.1984536082474228e-05,
      "loss": 0.9641,
      "step": 118000
    },
    {
      "epoch": 7.64,
      "learning_rate": 1.1823453608247423e-05,
      "loss": 0.9696,
      "step": 118500
    },
    {
      "epoch": 7.67,
      "learning_rate": 1.1662371134020619e-05,
      "loss": 0.9849,
      "step": 119000
    },
    {
      "epoch": 7.7,
      "learning_rate": 1.1501288659793814e-05,
      "loss": 1.0011,
      "step": 119500
    },
    {
      "epoch": 7.73,
      "learning_rate": 1.134020618556701e-05,
      "loss": 0.9824,
      "step": 120000
    },
    {
      "epoch": 7.76,
      "learning_rate": 1.1179123711340208e-05,
      "loss": 0.9685,
      "step": 120500
    },
    {
      "epoch": 7.8,
      "learning_rate": 1.1018041237113403e-05,
      "loss": 0.9807,
      "step": 121000
    },
    {
      "epoch": 7.83,
      "learning_rate": 1.0856958762886598e-05,
      "loss": 0.9776,
      "step": 121500
    },
    {
      "epoch": 7.86,
      "learning_rate": 1.0695876288659794e-05,
      "loss": 0.9665,
      "step": 122000
    },
    {
      "epoch": 7.89,
      "learning_rate": 1.053479381443299e-05,
      "loss": 0.9841,
      "step": 122500
    },
    {
      "epoch": 7.93,
      "learning_rate": 1.0373711340206186e-05,
      "loss": 0.978,
      "step": 123000
    },
    {
      "epoch": 7.96,
      "learning_rate": 1.0212628865979381e-05,
      "loss": 0.9936,
      "step": 123500
    },
    {
      "epoch": 7.99,
      "learning_rate": 1.0051546391752578e-05,
      "loss": 0.979,
      "step": 124000
    },
    {
      "epoch": 8.02,
      "learning_rate": 9.890463917525774e-06,
      "loss": 0.9301,
      "step": 124500
    },
    {
      "epoch": 8.05,
      "learning_rate": 9.729381443298969e-06,
      "loss": 0.8839,
      "step": 125000
    },
    {
      "epoch": 8.09,
      "learning_rate": 9.568298969072164e-06,
      "loss": 0.8927,
      "step": 125500
    },
    {
      "epoch": 8.12,
      "learning_rate": 9.407216494845361e-06,
      "loss": 0.8764,
      "step": 126000
    },
    {
      "epoch": 8.15,
      "learning_rate": 9.246134020618558e-06,
      "loss": 0.8763,
      "step": 126500
    },
    {
      "epoch": 8.18,
      "learning_rate": 9.085051546391753e-06,
      "loss": 0.8848,
      "step": 127000
    },
    {
      "epoch": 8.22,
      "learning_rate": 8.923969072164949e-06,
      "loss": 0.8908,
      "step": 127500
    },
    {
      "epoch": 8.25,
      "learning_rate": 8.762886597938144e-06,
      "loss": 0.9014,
      "step": 128000
    },
    {
      "epoch": 8.28,
      "learning_rate": 8.601804123711341e-06,
      "loss": 0.882,
      "step": 128500
    },
    {
      "epoch": 8.31,
      "learning_rate": 8.440721649484536e-06,
      "loss": 0.9012,
      "step": 129000
    },
    {
      "epoch": 8.34,
      "learning_rate": 8.279639175257732e-06,
      "loss": 0.859,
      "step": 129500
    },
    {
      "epoch": 8.38,
      "learning_rate": 8.118556701030929e-06,
      "loss": 0.8917,
      "step": 130000
    },
    {
      "epoch": 8.41,
      "learning_rate": 7.957474226804124e-06,
      "loss": 0.9153,
      "step": 130500
    },
    {
      "epoch": 8.44,
      "learning_rate": 7.79639175257732e-06,
      "loss": 0.8929,
      "step": 131000
    },
    {
      "epoch": 8.47,
      "learning_rate": 7.635309278350516e-06,
      "loss": 0.8982,
      "step": 131500
    },
    {
      "epoch": 8.51,
      "learning_rate": 7.4742268041237115e-06,
      "loss": 0.8929,
      "step": 132000
    },
    {
      "epoch": 8.54,
      "learning_rate": 7.313144329896908e-06,
      "loss": 0.9249,
      "step": 132500
    },
    {
      "epoch": 8.57,
      "learning_rate": 7.152061855670103e-06,
      "loss": 0.8743,
      "step": 133000
    },
    {
      "epoch": 8.6,
      "learning_rate": 6.990979381443299e-06,
      "loss": 0.8956,
      "step": 133500
    },
    {
      "epoch": 8.63,
      "learning_rate": 6.829896907216494e-06,
      "loss": 0.8994,
      "step": 134000
    },
    {
      "epoch": 8.67,
      "learning_rate": 6.668814432989691e-06,
      "loss": 0.8952,
      "step": 134500
    },
    {
      "epoch": 8.7,
      "learning_rate": 6.5077319587628875e-06,
      "loss": 0.9033,
      "step": 135000
    },
    {
      "epoch": 8.73,
      "learning_rate": 6.346649484536083e-06,
      "loss": 0.9032,
      "step": 135500
    },
    {
      "epoch": 8.76,
      "learning_rate": 6.185567010309279e-06,
      "loss": 0.8964,
      "step": 136000
    },
    {
      "epoch": 8.8,
      "learning_rate": 6.024484536082474e-06,
      "loss": 0.8843,
      "step": 136500
    },
    {
      "epoch": 8.83,
      "learning_rate": 5.86340206185567e-06,
      "loss": 0.9027,
      "step": 137000
    },
    {
      "epoch": 8.86,
      "learning_rate": 5.7023195876288665e-06,
      "loss": 0.9043,
      "step": 137500
    },
    {
      "epoch": 8.89,
      "learning_rate": 5.541237113402062e-06,
      "loss": 0.8974,
      "step": 138000
    },
    {
      "epoch": 8.92,
      "learning_rate": 5.380154639175258e-06,
      "loss": 0.8937,
      "step": 138500
    },
    {
      "epoch": 8.96,
      "learning_rate": 5.219072164948454e-06,
      "loss": 0.8971,
      "step": 139000
    },
    {
      "epoch": 8.99,
      "learning_rate": 5.057989690721649e-06,
      "loss": 0.9233,
      "step": 139500
    },
    {
      "epoch": 9.02,
      "learning_rate": 4.8969072164948455e-06,
      "loss": 0.852,
      "step": 140000
    },
    {
      "epoch": 9.05,
      "learning_rate": 4.735824742268042e-06,
      "loss": 0.8142,
      "step": 140500
    },
    {
      "epoch": 9.09,
      "learning_rate": 4.574742268041237e-06,
      "loss": 0.8369,
      "step": 141000
    },
    {
      "epoch": 9.12,
      "learning_rate": 4.413659793814433e-06,
      "loss": 0.8492,
      "step": 141500
    },
    {
      "epoch": 9.15,
      "learning_rate": 4.252577319587629e-06,
      "loss": 0.8308,
      "step": 142000
    },
    {
      "epoch": 9.18,
      "learning_rate": 4.0914948453608246e-06,
      "loss": 0.8706,
      "step": 142500
    },
    {
      "epoch": 9.21,
      "learning_rate": 3.930412371134021e-06,
      "loss": 0.8536,
      "step": 143000
    },
    {
      "epoch": 9.25,
      "learning_rate": 3.7693298969072164e-06,
      "loss": 0.8231,
      "step": 143500
    },
    {
      "epoch": 9.28,
      "learning_rate": 3.608247422680412e-06,
      "loss": 0.8492,
      "step": 144000
    },
    {
      "epoch": 9.31,
      "learning_rate": 3.4471649484536087e-06,
      "loss": 0.8233,
      "step": 144500
    },
    {
      "epoch": 9.34,
      "learning_rate": 3.2860824742268044e-06,
      "loss": 0.8217,
      "step": 145000
    },
    {
      "epoch": 9.38,
      "learning_rate": 3.125e-06,
      "loss": 0.8694,
      "step": 145500
    },
    {
      "epoch": 9.41,
      "learning_rate": 2.963917525773196e-06,
      "loss": 0.866,
      "step": 146000
    },
    {
      "epoch": 9.44,
      "learning_rate": 2.802835051546392e-06,
      "loss": 0.8402,
      "step": 146500
    },
    {
      "epoch": 9.47,
      "learning_rate": 2.6417525773195877e-06,
      "loss": 0.84,
      "step": 147000
    },
    {
      "epoch": 9.5,
      "learning_rate": 2.4806701030927835e-06,
      "loss": 0.8501,
      "step": 147500
    },
    {
      "epoch": 9.54,
      "learning_rate": 2.3195876288659796e-06,
      "loss": 0.8374,
      "step": 148000
    },
    {
      "epoch": 9.57,
      "learning_rate": 2.1585051546391753e-06,
      "loss": 0.8645,
      "step": 148500
    },
    {
      "epoch": 9.6,
      "learning_rate": 1.997422680412371e-06,
      "loss": 0.8513,
      "step": 149000
    },
    {
      "epoch": 9.63,
      "learning_rate": 1.8363402061855672e-06,
      "loss": 0.8475,
      "step": 149500
    },
    {
      "epoch": 9.66,
      "learning_rate": 1.675257731958763e-06,
      "loss": 0.8474,
      "step": 150000
    },
    {
      "epoch": 9.7,
      "learning_rate": 1.5141752577319588e-06,
      "loss": 0.8414,
      "step": 150500
    },
    {
      "epoch": 9.73,
      "learning_rate": 1.3530927835051548e-06,
      "loss": 0.8441,
      "step": 151000
    },
    {
      "epoch": 9.76,
      "learning_rate": 1.1920103092783505e-06,
      "loss": 0.8762,
      "step": 151500
    },
    {
      "epoch": 9.79,
      "learning_rate": 1.0309278350515464e-06,
      "loss": 0.842,
      "step": 152000
    },
    {
      "epoch": 9.83,
      "learning_rate": 8.698453608247423e-07,
      "loss": 0.8571,
      "step": 152500
    },
    {
      "epoch": 9.86,
      "learning_rate": 7.087628865979382e-07,
      "loss": 0.8309,
      "step": 153000
    },
    {
      "epoch": 9.89,
      "learning_rate": 5.47680412371134e-07,
      "loss": 0.8334,
      "step": 153500
    },
    {
      "epoch": 9.92,
      "learning_rate": 3.8659793814432993e-07,
      "loss": 0.8501,
      "step": 154000
    },
    {
      "epoch": 9.95,
      "learning_rate": 2.2551546391752576e-07,
      "loss": 0.8423,
      "step": 154500
    },
    {
      "epoch": 9.99,
      "learning_rate": 6.443298969072165e-08,
      "loss": 0.8466,
      "step": 155000
    },
    {
      "epoch": 10.0,
      "step": 155200,
      "total_flos": 1.441343477907456e+17,
      "train_loss": 1.3494602605485424,
      "train_runtime": 31681.5151,
      "train_samples_per_second": 4.899,
      "train_steps_per_second": 4.899
    }
  ],
  "max_steps": 155200,
  "num_train_epochs": 10,
  "total_flos": 1.441343477907456e+17,
  "trial_name": null,
  "trial_params": null
}