{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.9994440473353983,
  "eval_steps": 500,
  "global_step": 1573,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0031768723691525695,
      "grad_norm": 17.809675216674805,
      "learning_rate": 6.329113924050634e-07,
      "loss": 1.9316,
      "step": 5
    },
    {
      "epoch": 0.006353744738305139,
      "grad_norm": 14.445319175720215,
      "learning_rate": 1.2658227848101267e-06,
      "loss": 1.9416,
      "step": 10
    },
    {
      "epoch": 0.009530617107457708,
      "grad_norm": 8.333172798156738,
      "learning_rate": 1.8987341772151901e-06,
      "loss": 1.79,
      "step": 15
    },
    {
      "epoch": 0.012707489476610278,
      "grad_norm": 5.169636249542236,
      "learning_rate": 2.5316455696202535e-06,
      "loss": 1.5866,
      "step": 20
    },
    {
      "epoch": 0.015884361845762845,
      "grad_norm": 4.046501636505127,
      "learning_rate": 3.164556962025317e-06,
      "loss": 1.3408,
      "step": 25
    },
    {
      "epoch": 0.019061234214915415,
      "grad_norm": 5.918606758117676,
      "learning_rate": 3.7974683544303802e-06,
      "loss": 1.0916,
      "step": 30
    },
    {
      "epoch": 0.022238106584067985,
      "grad_norm": 3.1505024433135986,
      "learning_rate": 4.430379746835443e-06,
      "loss": 1.0186,
      "step": 35
    },
    {
      "epoch": 0.025414978953220556,
      "grad_norm": 3.3473265171051025,
      "learning_rate": 5.063291139240507e-06,
      "loss": 0.9224,
      "step": 40
    },
    {
      "epoch": 0.028591851322373123,
      "grad_norm": 3.4320054054260254,
      "learning_rate": 5.69620253164557e-06,
      "loss": 0.8603,
      "step": 45
    },
    {
      "epoch": 0.03176872369152569,
      "grad_norm": 3.0531117916107178,
      "learning_rate": 6.329113924050634e-06,
      "loss": 0.8021,
      "step": 50
    },
    {
      "epoch": 0.03494559606067826,
      "grad_norm": 2.459690570831299,
      "learning_rate": 6.962025316455697e-06,
      "loss": 0.7707,
      "step": 55
    },
    {
      "epoch": 0.03812246842983083,
      "grad_norm": 2.4381773471832275,
      "learning_rate": 7.5949367088607605e-06,
      "loss": 0.7425,
      "step": 60
    },
    {
      "epoch": 0.041299340798983404,
      "grad_norm": 2.486311435699463,
      "learning_rate": 8.227848101265824e-06,
      "loss": 0.7261,
      "step": 65
    },
    {
      "epoch": 0.04447621316813597,
      "grad_norm": 3.9224603176116943,
      "learning_rate": 8.860759493670886e-06,
      "loss": 0.6954,
      "step": 70
    },
    {
      "epoch": 0.04765308553728854,
      "grad_norm": 15.483328819274902,
      "learning_rate": 9.49367088607595e-06,
      "loss": 0.6716,
      "step": 75
    },
    {
      "epoch": 0.05082995790644111,
      "grad_norm": 3.13858962059021,
      "learning_rate": 1.0126582278481014e-05,
      "loss": 0.6977,
      "step": 80
    },
    {
      "epoch": 0.05400683027559368,
      "grad_norm": 2.8523001670837402,
      "learning_rate": 1.0759493670886076e-05,
      "loss": 0.6585,
      "step": 85
    },
    {
      "epoch": 0.057183702644746245,
      "grad_norm": 3.508849859237671,
      "learning_rate": 1.139240506329114e-05,
      "loss": 0.6787,
      "step": 90
    },
    {
      "epoch": 0.06036057501389882,
      "grad_norm": 2.026244640350342,
      "learning_rate": 1.2025316455696203e-05,
      "loss": 0.6398,
      "step": 95
    },
    {
      "epoch": 0.06353744738305138,
      "grad_norm": 3.702071189880371,
      "learning_rate": 1.2658227848101268e-05,
      "loss": 0.6447,
      "step": 100
    },
    {
      "epoch": 0.06671431975220396,
      "grad_norm": 2.9863617420196533,
      "learning_rate": 1.329113924050633e-05,
      "loss": 0.6313,
      "step": 105
    },
    {
      "epoch": 0.06989119212135653,
      "grad_norm": 2.7040019035339355,
      "learning_rate": 1.3924050632911395e-05,
      "loss": 0.6274,
      "step": 110
    },
    {
      "epoch": 0.0730680644905091,
      "grad_norm": 2.6223275661468506,
      "learning_rate": 1.4556962025316457e-05,
      "loss": 0.6421,
      "step": 115
    },
    {
      "epoch": 0.07624493685966166,
      "grad_norm": 2.3697361946105957,
      "learning_rate": 1.5189873417721521e-05,
      "loss": 0.6268,
      "step": 120
    },
    {
      "epoch": 0.07942180922881423,
      "grad_norm": 2.227189302444458,
      "learning_rate": 1.5822784810126583e-05,
      "loss": 0.6415,
      "step": 125
    },
    {
      "epoch": 0.08259868159796681,
      "grad_norm": 2.316190242767334,
      "learning_rate": 1.6455696202531647e-05,
      "loss": 0.6076,
      "step": 130
    },
    {
      "epoch": 0.08577555396711938,
      "grad_norm": 2.2983508110046387,
      "learning_rate": 1.708860759493671e-05,
      "loss": 0.5924,
      "step": 135
    },
    {
      "epoch": 0.08895242633627194,
      "grad_norm": 1.7339043617248535,
      "learning_rate": 1.7721518987341772e-05,
      "loss": 0.6322,
      "step": 140
    },
    {
      "epoch": 0.09212929870542451,
      "grad_norm": 1.9135723114013672,
      "learning_rate": 1.8354430379746836e-05,
      "loss": 0.6169,
      "step": 145
    },
    {
      "epoch": 0.09530617107457708,
      "grad_norm": 1.901855230331421,
      "learning_rate": 1.89873417721519e-05,
      "loss": 0.6093,
      "step": 150
    },
    {
      "epoch": 0.09848304344372964,
      "grad_norm": 1.660704493522644,
      "learning_rate": 1.9620253164556964e-05,
      "loss": 0.6128,
      "step": 155
    },
    {
      "epoch": 0.10165991581288222,
      "grad_norm": 1.8829810619354248,
      "learning_rate": 1.9999901413795314e-05,
      "loss": 0.6008,
      "step": 160
    },
    {
      "epoch": 0.10483678818203479,
      "grad_norm": 2.084669351577759,
      "learning_rate": 1.9998792341316304e-05,
      "loss": 0.618,
      "step": 165
    },
    {
      "epoch": 0.10801366055118736,
      "grad_norm": 1.7610174417495728,
      "learning_rate": 1.9996451100730896e-05,
      "loss": 0.6038,
      "step": 170
    },
    {
      "epoch": 0.11119053292033992,
      "grad_norm": 2.0930941104888916,
      "learning_rate": 1.999287798055413e-05,
      "loss": 0.5973,
      "step": 175
    },
    {
      "epoch": 0.11436740528949249,
      "grad_norm": 2.1560468673706055,
      "learning_rate": 1.9988073421107646e-05,
      "loss": 0.5826,
      "step": 180
    },
    {
      "epoch": 0.11754427765864506,
      "grad_norm": 1.6942908763885498,
      "learning_rate": 1.998203801446545e-05,
      "loss": 0.5689,
      "step": 185
    },
    {
      "epoch": 0.12072115002779764,
      "grad_norm": 1.6405870914459229,
      "learning_rate": 1.9974772504380907e-05,
      "loss": 0.5516,
      "step": 190
    },
    {
      "epoch": 0.1238980223969502,
      "grad_norm": 1.5230438709259033,
      "learning_rate": 1.9966277786195137e-05,
      "loss": 0.5703,
      "step": 195
    },
    {
      "epoch": 0.12707489476610276,
      "grad_norm": 1.5323349237442017,
      "learning_rate": 1.9956554906726627e-05,
      "loss": 0.5821,
      "step": 200
    },
    {
      "epoch": 0.13025176713525535,
      "grad_norm": 1.4441514015197754,
      "learning_rate": 1.994560506414229e-05,
      "loss": 0.6008,
      "step": 205
    },
    {
      "epoch": 0.13342863950440792,
      "grad_norm": 1.5659058094024658,
      "learning_rate": 1.9933429607809746e-05,
      "loss": 0.586,
      "step": 210
    },
    {
      "epoch": 0.1366055118735605,
      "grad_norm": 1.8428056240081787,
      "learning_rate": 1.9920030038131104e-05,
      "loss": 0.5608,
      "step": 215
    },
    {
      "epoch": 0.13978238424271305,
      "grad_norm": 1.4605518579483032,
      "learning_rate": 1.990540800635801e-05,
      "loss": 0.5217,
      "step": 220
    },
    {
      "epoch": 0.14295925661186562,
      "grad_norm": 1.4116017818450928,
      "learning_rate": 1.98895653143882e-05,
      "loss": 0.5979,
      "step": 225
    },
    {
      "epoch": 0.1461361289810182,
      "grad_norm": 1.426926612854004,
      "learning_rate": 1.9872503914543416e-05,
      "loss": 0.5739,
      "step": 230
    },
    {
      "epoch": 0.14931300135017075,
      "grad_norm": 1.7036380767822266,
      "learning_rate": 1.9854225909328845e-05,
      "loss": 0.56,
      "step": 235
    },
    {
      "epoch": 0.15248987371932332,
      "grad_norm": 1.400126576423645,
      "learning_rate": 1.9834733551174014e-05,
      "loss": 0.5589,
      "step": 240
    },
    {
      "epoch": 0.1556667460884759,
      "grad_norm": 1.3639277219772339,
      "learning_rate": 1.9814029242155217e-05,
      "loss": 0.5827,
      "step": 245
    },
    {
      "epoch": 0.15884361845762845,
      "grad_norm": 1.3997248411178589,
      "learning_rate": 1.9792115533699493e-05,
      "loss": 0.5768,
      "step": 250
    },
    {
      "epoch": 0.16202049082678102,
      "grad_norm": 1.3513110876083374,
      "learning_rate": 1.976899512627024e-05,
      "loss": 0.56,
      "step": 255
    },
    {
      "epoch": 0.16519736319593362,
      "grad_norm": 1.338847279548645,
      "learning_rate": 1.9744670869034407e-05,
      "loss": 0.5212,
      "step": 260
    },
    {
      "epoch": 0.16837423556508618,
      "grad_norm": 1.4440042972564697,
      "learning_rate": 1.971914575951138e-05,
      "loss": 0.5708,
      "step": 265
    },
    {
      "epoch": 0.17155110793423875,
      "grad_norm": 1.3869779109954834,
      "learning_rate": 1.969242294320362e-05,
      "loss": 0.5483,
      "step": 270
    },
    {
      "epoch": 0.17472798030339132,
      "grad_norm": 1.5152400732040405,
      "learning_rate": 1.9664505713209017e-05,
      "loss": 0.554,
      "step": 275
    },
    {
      "epoch": 0.17790485267254388,
      "grad_norm": 1.3730214834213257,
      "learning_rate": 1.9635397509815087e-05,
      "loss": 0.5478,
      "step": 280
    },
    {
      "epoch": 0.18108172504169645,
      "grad_norm": 1.4039232730865479,
      "learning_rate": 1.9605101920075003e-05,
      "loss": 0.5253,
      "step": 285
    },
    {
      "epoch": 0.18425859741084902,
      "grad_norm": 1.5447697639465332,
      "learning_rate": 1.9573622677365572e-05,
      "loss": 0.5343,
      "step": 290
    },
    {
      "epoch": 0.18743546978000158,
      "grad_norm": 1.3592406511306763,
      "learning_rate": 1.954096366092717e-05,
      "loss": 0.572,
      "step": 295
    },
    {
      "epoch": 0.19061234214915415,
      "grad_norm": 1.4715423583984375,
      "learning_rate": 1.9507128895385676e-05,
      "loss": 0.5579,
      "step": 300
    },
    {
      "epoch": 0.19378921451830672,
      "grad_norm": 1.3932384252548218,
      "learning_rate": 1.9472122550256516e-05,
      "loss": 0.5669,
      "step": 305
    },
    {
      "epoch": 0.19696608688745929,
      "grad_norm": 1.216783046722412,
      "learning_rate": 1.9435948939430868e-05,
      "loss": 0.5204,
      "step": 310
    },
    {
      "epoch": 0.20014295925661185,
      "grad_norm": 1.2733324766159058,
      "learning_rate": 1.9398612520644028e-05,
      "loss": 0.5181,
      "step": 315
    },
    {
      "epoch": 0.20331983162576445,
      "grad_norm": 1.4451696872711182,
      "learning_rate": 1.936011789492609e-05,
      "loss": 0.5507,
      "step": 320
    },
    {
      "epoch": 0.206496703994917,
      "grad_norm": 1.3773140907287598,
      "learning_rate": 1.9320469806034937e-05,
      "loss": 0.5342,
      "step": 325
    },
    {
      "epoch": 0.20967357636406958,
      "grad_norm": 1.330608606338501,
      "learning_rate": 1.9279673139871686e-05,
      "loss": 0.5355,
      "step": 330
    },
    {
      "epoch": 0.21285044873322215,
      "grad_norm": 1.1820790767669678,
      "learning_rate": 1.923773292387857e-05,
      "loss": 0.5492,
      "step": 335
    },
    {
      "epoch": 0.21602732110237471,
      "grad_norm": 1.4723114967346191,
      "learning_rate": 1.9194654326419405e-05,
      "loss": 0.5303,
      "step": 340
    },
    {
      "epoch": 0.21920419347152728,
      "grad_norm": 1.3733022212982178,
      "learning_rate": 1.9150442656142675e-05,
      "loss": 0.5085,
      "step": 345
    },
    {
      "epoch": 0.22238106584067985,
      "grad_norm": 1.2373732328414917,
      "learning_rate": 1.910510336132735e-05,
      "loss": 0.5333,
      "step": 350
    },
    {
      "epoch": 0.22555793820983241,
      "grad_norm": 1.3679918050765991,
      "learning_rate": 1.9058642029211492e-05,
      "loss": 0.5518,
      "step": 355
    },
    {
      "epoch": 0.22873481057898498,
      "grad_norm": 1.4029483795166016,
      "learning_rate": 1.9011064385303697e-05,
      "loss": 0.5217,
      "step": 360
    },
    {
      "epoch": 0.23191168294813755,
      "grad_norm": 1.2620315551757812,
      "learning_rate": 1.896237629267757e-05,
      "loss": 0.5386,
      "step": 365
    },
    {
      "epoch": 0.23508855531729012,
      "grad_norm": 1.207796335220337,
      "learning_rate": 1.8912583751249184e-05,
      "loss": 0.5053,
      "step": 370
    },
    {
      "epoch": 0.2382654276864427,
      "grad_norm": 1.3968982696533203,
      "learning_rate": 1.886169289703771e-05,
      "loss": 0.5457,
      "step": 375
    },
    {
      "epoch": 0.24144230005559528,
      "grad_norm": 1.2062251567840576,
      "learning_rate": 1.880971000140926e-05,
      "loss": 0.5248,
      "step": 380
    },
    {
      "epoch": 0.24461917242474784,
      "grad_norm": 1.5815231800079346,
      "learning_rate": 1.875664147030406e-05,
      "loss": 0.534,
      "step": 385
    },
    {
      "epoch": 0.2477960447939004,
      "grad_norm": 1.2773513793945312,
      "learning_rate": 1.870249384344704e-05,
      "loss": 0.5188,
      "step": 390
    },
    {
      "epoch": 0.250972917163053,
      "grad_norm": 1.4699748754501343,
      "learning_rate": 1.864727379354191e-05,
      "loss": 0.5271,
      "step": 395
    },
    {
      "epoch": 0.2541497895322055,
      "grad_norm": 1.5354286432266235,
      "learning_rate": 1.8590988125448906e-05,
      "loss": 0.5087,
      "step": 400
    },
    {
      "epoch": 0.2573266619013581,
      "grad_norm": 1.1777474880218506,
      "learning_rate": 1.853364377534618e-05,
      "loss": 0.4959,
      "step": 405
    },
    {
      "epoch": 0.2605035342705107,
      "grad_norm": 1.197090744972229,
      "learning_rate": 1.8475247809875076e-05,
      "loss": 0.5336,
      "step": 410
    },
    {
      "epoch": 0.26368040663966325,
      "grad_norm": 1.1266974210739136,
      "learning_rate": 1.8415807425269273e-05,
      "loss": 0.5502,
      "step": 415
    },
    {
      "epoch": 0.26685727900881584,
      "grad_norm": 1.2330613136291504,
      "learning_rate": 1.8355329946467984e-05,
      "loss": 0.5332,
      "step": 420
    },
    {
      "epoch": 0.2700341513779684,
      "grad_norm": 1.227715015411377,
      "learning_rate": 1.8293822826213302e-05,
      "loss": 0.51,
      "step": 425
    },
    {
      "epoch": 0.273211023747121,
      "grad_norm": 1.3449698686599731,
      "learning_rate": 1.823129364413178e-05,
      "loss": 0.5039,
      "step": 430
    },
    {
      "epoch": 0.2763878961162735,
      "grad_norm": 1.2230104207992554,
      "learning_rate": 1.8167750105800367e-05,
      "loss": 0.5356,
      "step": 435
    },
    {
      "epoch": 0.2795647684854261,
      "grad_norm": 1.2294344902038574,
      "learning_rate": 1.8103200041796864e-05,
      "loss": 0.5379,
      "step": 440
    },
    {
      "epoch": 0.28274164085457865,
      "grad_norm": 1.120505690574646,
      "learning_rate": 1.8037651406734936e-05,
      "loss": 0.5017,
      "step": 445
    },
    {
      "epoch": 0.28591851322373124,
      "grad_norm": 1.1958080530166626,
      "learning_rate": 1.7971112278283836e-05,
      "loss": 0.5307,
      "step": 450
    },
    {
      "epoch": 0.2890953855928838,
      "grad_norm": 1.2974480390548706,
      "learning_rate": 1.7903590856173004e-05,
      "loss": 0.5287,
      "step": 455
    },
    {
      "epoch": 0.2922722579620364,
      "grad_norm": 1.2087041139602661,
      "learning_rate": 1.7835095461181594e-05,
      "loss": 0.4981,
      "step": 460
    },
    {
      "epoch": 0.29544913033118897,
      "grad_norm": 1.2422888278961182,
      "learning_rate": 1.7765634534113075e-05,
      "loss": 0.5292,
      "step": 465
    },
    {
      "epoch": 0.2986260027003415,
      "grad_norm": 1.2754653692245483,
      "learning_rate": 1.7695216634755076e-05,
      "loss": 0.5334,
      "step": 470
    },
    {
      "epoch": 0.3018028750694941,
      "grad_norm": 1.4292696714401245,
      "learning_rate": 1.762385044082454e-05,
      "loss": 0.5223,
      "step": 475
    },
    {
      "epoch": 0.30497974743864664,
      "grad_norm": 1.1018996238708496,
      "learning_rate": 1.7551544746898355e-05,
      "loss": 0.5291,
      "step": 480
    },
    {
      "epoch": 0.30815661980779924,
      "grad_norm": 1.2348871231079102,
      "learning_rate": 1.7478308463329584e-05,
      "loss": 0.5271,
      "step": 485
    },
    {
      "epoch": 0.3113334921769518,
      "grad_norm": 1.2747186422348022,
      "learning_rate": 1.7404150615149433e-05,
      "loss": 0.5171,
      "step": 490
    },
    {
      "epoch": 0.31451036454610437,
      "grad_norm": 1.1623306274414062,
      "learning_rate": 1.7329080340955063e-05,
      "loss": 0.512,
      "step": 495
    },
    {
      "epoch": 0.3176872369152569,
      "grad_norm": 1.3075213432312012,
      "learning_rate": 1.725310689178344e-05,
      "loss": 0.4984,
      "step": 500
    },
    {
      "epoch": 0.3208641092844095,
      "grad_norm": 1.14993417263031,
      "learning_rate": 1.7176239629971325e-05,
      "loss": 0.5118,
      "step": 505
    },
    {
      "epoch": 0.32404098165356204,
      "grad_norm": 1.3417556285858154,
      "learning_rate": 1.709848802800151e-05,
      "loss": 0.4961,
      "step": 510
    },
    {
      "epoch": 0.32721785402271464,
      "grad_norm": 1.2760066986083984,
      "learning_rate": 1.7019861667335524e-05,
      "loss": 0.4922,
      "step": 515
    },
    {
      "epoch": 0.33039472639186723,
      "grad_norm": 1.1567506790161133,
      "learning_rate": 1.6940370237232887e-05,
      "loss": 0.4858,
      "step": 520
    },
    {
      "epoch": 0.33357159876101977,
      "grad_norm": 1.4701095819473267,
      "learning_rate": 1.6860023533557106e-05,
      "loss": 0.4964,
      "step": 525
    },
    {
      "epoch": 0.33674847113017237,
      "grad_norm": 1.2197904586791992,
      "learning_rate": 1.677883145756848e-05,
      "loss": 0.487,
      "step": 530
    },
    {
      "epoch": 0.3399253434993249,
      "grad_norm": 1.1546249389648438,
      "learning_rate": 1.669680401470398e-05,
      "loss": 0.4745,
      "step": 535
    },
    {
      "epoch": 0.3431022158684775,
      "grad_norm": 1.1630467176437378,
      "learning_rate": 1.661395131334425e-05,
      "loss": 0.5103,
      "step": 540
    },
    {
      "epoch": 0.34627908823763004,
      "grad_norm": 1.2125153541564941,
      "learning_rate": 1.6530283563567927e-05,
      "loss": 0.5193,
      "step": 545
    },
    {
      "epoch": 0.34945596060678263,
      "grad_norm": 1.4535295963287354,
      "learning_rate": 1.644581107589346e-05,
      "loss": 0.5122,
      "step": 550
    },
    {
      "epoch": 0.3526328329759352,
      "grad_norm": 1.1809464693069458,
      "learning_rate": 1.636054426000849e-05,
      "loss": 0.5062,
      "step": 555
    },
    {
      "epoch": 0.35580970534508777,
      "grad_norm": 1.2318694591522217,
      "learning_rate": 1.6274493623487094e-05,
      "loss": 0.4834,
      "step": 560
    },
    {
      "epoch": 0.3589865777142403,
      "grad_norm": 1.2554454803466797,
      "learning_rate": 1.6187669770494867e-05,
      "loss": 0.5008,
      "step": 565
    },
    {
      "epoch": 0.3621634500833929,
      "grad_norm": 1.2687321901321411,
      "learning_rate": 1.6100083400482197e-05,
      "loss": 0.5019,
      "step": 570
    },
    {
      "epoch": 0.3653403224525455,
      "grad_norm": 1.148877501487732,
      "learning_rate": 1.6011745306865726e-05,
      "loss": 0.5094,
      "step": 575
    },
    {
      "epoch": 0.36851719482169804,
      "grad_norm": 1.1435184478759766,
      "learning_rate": 1.5922666375698275e-05,
      "loss": 0.513,
      "step": 580
    },
    {
      "epoch": 0.37169406719085063,
      "grad_norm": 1.151990294456482,
      "learning_rate": 1.583285758432732e-05,
      "loss": 0.4866,
      "step": 585
    },
    {
      "epoch": 0.37487093956000317,
      "grad_norm": 1.1544941663742065,
      "learning_rate": 1.574233000004226e-05,
      "loss": 0.4835,
      "step": 590
    },
    {
      "epoch": 0.37804781192915576,
      "grad_norm": 1.1831696033477783,
      "learning_rate": 1.5651094778710548e-05,
      "loss": 0.4941,
      "step": 595
    },
    {
      "epoch": 0.3812246842983083,
      "grad_norm": 1.1588399410247803,
      "learning_rate": 1.5559163163402953e-05,
      "loss": 0.4891,
      "step": 600
    },
    {
      "epoch": 0.3844015566674609,
      "grad_norm": 1.0901782512664795,
      "learning_rate": 1.5466546483008057e-05,
      "loss": 0.4954,
      "step": 605
    },
    {
      "epoch": 0.38757842903661344,
      "grad_norm": 1.1416994333267212,
      "learning_rate": 1.5373256150836168e-05,
      "loss": 0.5006,
      "step": 610
    },
    {
      "epoch": 0.39075530140576603,
      "grad_norm": 1.2424296140670776,
      "learning_rate": 1.527930366321284e-05,
      "loss": 0.4867,
      "step": 615
    },
    {
      "epoch": 0.39393217377491857,
      "grad_norm": 1.2324466705322266,
      "learning_rate": 1.5184700598062172e-05,
      "loss": 0.5112,
      "step": 620
    },
    {
      "epoch": 0.39710904614407116,
      "grad_norm": 1.193036675453186,
      "learning_rate": 1.508945861348003e-05,
      "loss": 0.4907,
      "step": 625
    },
    {
      "epoch": 0.4002859185132237,
      "grad_norm": 1.25301992893219,
      "learning_rate": 1.4993589446297396e-05,
      "loss": 0.4811,
      "step": 630
    },
    {
      "epoch": 0.4034627908823763,
      "grad_norm": 1.3160258531570435,
      "learning_rate": 1.4897104910634035e-05,
      "loss": 0.4873,
      "step": 635
    },
    {
      "epoch": 0.4066396632515289,
      "grad_norm": 1.1781983375549316,
      "learning_rate": 1.4800016896442606e-05,
      "loss": 0.5187,
      "step": 640
    },
    {
      "epoch": 0.40981653562068143,
      "grad_norm": 1.2555766105651855,
      "learning_rate": 1.4702337368043452e-05,
      "loss": 0.5007,
      "step": 645
    },
    {
      "epoch": 0.412993407989834,
      "grad_norm": 1.3068451881408691,
      "learning_rate": 1.4604078362650212e-05,
      "loss": 0.491,
      "step": 650
    },
    {
      "epoch": 0.41617028035898657,
      "grad_norm": 1.1431891918182373,
      "learning_rate": 1.4505251988886455e-05,
      "loss": 0.4887,
      "step": 655
    },
    {
      "epoch": 0.41934715272813916,
      "grad_norm": 1.1333122253417969,
      "learning_rate": 1.4405870425293518e-05,
      "loss": 0.489,
      "step": 660
    },
    {
      "epoch": 0.4225240250972917,
      "grad_norm": 1.1263874769210815,
      "learning_rate": 1.430594591882971e-05,
      "loss": 0.4832,
      "step": 665
    },
    {
      "epoch": 0.4257008974664443,
      "grad_norm": 1.0670287609100342,
      "learning_rate": 1.420549078336113e-05,
      "loss": 0.5105,
      "step": 670
    },
    {
      "epoch": 0.42887776983559683,
      "grad_norm": 1.1833176612854004,
      "learning_rate": 1.4104517398144162e-05,
      "loss": 0.5155,
      "step": 675
    },
    {
      "epoch": 0.43205464220474943,
      "grad_norm": 1.251916766166687,
      "learning_rate": 1.40030382063e-05,
      "loss": 0.4977,
      "step": 680
    },
    {
      "epoch": 0.43523151457390197,
      "grad_norm": 1.0563840866088867,
      "learning_rate": 1.3901065713281248e-05,
      "loss": 0.4783,
      "step": 685
    },
    {
      "epoch": 0.43840838694305456,
      "grad_norm": 1.0980114936828613,
      "learning_rate": 1.3798612485330834e-05,
      "loss": 0.4698,
      "step": 690
    },
    {
      "epoch": 0.44158525931220716,
      "grad_norm": 1.427857518196106,
      "learning_rate": 1.3695691147933475e-05,
      "loss": 0.5045,
      "step": 695
    },
    {
      "epoch": 0.4447621316813597,
      "grad_norm": 1.2345526218414307,
      "learning_rate": 1.3592314384259809e-05,
      "loss": 0.5005,
      "step": 700
    },
    {
      "epoch": 0.4479390040505123,
      "grad_norm": 1.2202253341674805,
      "learning_rate": 1.3488494933603418e-05,
      "loss": 0.4712,
      "step": 705
    },
    {
      "epoch": 0.45111587641966483,
      "grad_norm": 1.1247138977050781,
      "learning_rate": 1.3384245589810955e-05,
      "loss": 0.4886,
      "step": 710
    },
    {
      "epoch": 0.4542927487888174,
      "grad_norm": 1.2570569515228271,
      "learning_rate": 1.3279579199705537e-05,
      "loss": 0.4849,
      "step": 715
    },
    {
      "epoch": 0.45746962115796996,
      "grad_norm": 1.1821649074554443,
      "learning_rate": 1.3174508661503591e-05,
      "loss": 0.4773,
      "step": 720
    },
    {
      "epoch": 0.46064649352712256,
      "grad_norm": 1.215633749961853,
      "learning_rate": 1.306904692322541e-05,
      "loss": 0.49,
      "step": 725
    },
    {
      "epoch": 0.4638233658962751,
      "grad_norm": 1.062605857849121,
      "learning_rate": 1.2963206981099528e-05,
      "loss": 0.4886,
      "step": 730
    },
    {
      "epoch": 0.4670002382654277,
      "grad_norm": 1.2538633346557617,
      "learning_rate": 1.2857001877961181e-05,
      "loss": 0.4874,
      "step": 735
    },
    {
      "epoch": 0.47017711063458023,
      "grad_norm": 1.1429924964904785,
      "learning_rate": 1.2750444701645013e-05,
      "loss": 0.4954,
      "step": 740
    },
    {
      "epoch": 0.4733539830037328,
      "grad_norm": 1.1001482009887695,
      "learning_rate": 1.264354858337225e-05,
      "loss": 0.4913,
      "step": 745
    },
    {
      "epoch": 0.4765308553728854,
      "grad_norm": 1.1915541887283325,
      "learning_rate": 1.25363266961325e-05,
      "loss": 0.4925,
      "step": 750
    },
    {
      "epoch": 0.47970772774203796,
      "grad_norm": 1.0904185771942139,
      "learning_rate": 1.242879225306043e-05,
      "loss": 0.461,
      "step": 755
    },
    {
      "epoch": 0.48288460011119055,
      "grad_norm": 1.0332921743392944,
      "learning_rate": 1.232095850580751e-05,
      "loss": 0.4575,
      "step": 760
    },
    {
      "epoch": 0.4860614724803431,
      "grad_norm": 1.1236475706100464,
      "learning_rate": 1.221283874290894e-05,
      "loss": 0.4813,
      "step": 765
    },
    {
      "epoch": 0.4892383448494957,
      "grad_norm": 1.1378127336502075,
      "learning_rate": 1.2104446288146143e-05,
      "loss": 0.4574,
      "step": 770
    },
    {
      "epoch": 0.4924152172186482,
      "grad_norm": 1.0330129861831665,
      "learning_rate": 1.1995794498904805e-05,
      "loss": 0.4872,
      "step": 775
    },
    {
      "epoch": 0.4955920895878008,
      "grad_norm": 1.2020204067230225,
      "learning_rate": 1.1886896764528837e-05,
      "loss": 0.4936,
      "step": 780
    },
    {
      "epoch": 0.49876896195695336,
      "grad_norm": 1.129347324371338,
      "learning_rate": 1.1777766504670397e-05,
      "loss": 0.4668,
      "step": 785
    },
    {
      "epoch": 0.501945834326106,
      "grad_norm": 1.1032615900039673,
      "learning_rate": 1.1668417167636143e-05,
      "loss": 0.4953,
      "step": 790
    },
    {
      "epoch": 0.5051227066952585,
      "grad_norm": 1.2174361944198608,
      "learning_rate": 1.1558862228729985e-05,
      "loss": 0.4856,
      "step": 795
    },
    {
      "epoch": 0.508299579064411,
      "grad_norm": 1.0304588079452515,
      "learning_rate": 1.1449115188592505e-05,
      "loss": 0.4603,
      "step": 800
    },
    {
      "epoch": 0.5114764514335637,
      "grad_norm": 1.1562561988830566,
      "learning_rate": 1.1339189571537244e-05,
      "loss": 0.4764,
      "step": 805
    },
    {
      "epoch": 0.5146533238027162,
      "grad_norm": 1.1028969287872314,
      "learning_rate": 1.1229098923884065e-05,
      "loss": 0.4614,
      "step": 810
    },
    {
      "epoch": 0.5178301961718688,
      "grad_norm": 1.118483304977417,
      "learning_rate": 1.1118856812289856e-05,
      "loss": 0.4934,
      "step": 815
    },
    {
      "epoch": 0.5210070685410214,
      "grad_norm": 1.1935844421386719,
      "learning_rate": 1.1008476822076638e-05,
      "loss": 0.4737,
      "step": 820
    },
    {
      "epoch": 0.524183940910174,
      "grad_norm": 1.1401269435882568,
      "learning_rate": 1.0897972555557465e-05,
      "loss": 0.4918,
      "step": 825
    },
    {
      "epoch": 0.5273608132793265,
      "grad_norm": 1.0230424404144287,
      "learning_rate": 1.0787357630360163e-05,
      "loss": 0.462,
      "step": 830
    },
    {
      "epoch": 0.530537685648479,
      "grad_norm": 1.0743178129196167,
      "learning_rate": 1.0676645677749215e-05,
      "loss": 0.4995,
      "step": 835
    },
    {
      "epoch": 0.5337145580176317,
      "grad_norm": 1.170649766921997,
      "learning_rate": 1.0565850340945955e-05,
      "loss": 0.4951,
      "step": 840
    },
    {
      "epoch": 0.5368914303867842,
      "grad_norm": 1.068259835243225,
      "learning_rate": 1.04549852734473e-05,
      "loss": 0.4667,
      "step": 845
    },
    {
      "epoch": 0.5400683027559368,
      "grad_norm": 1.168420433998108,
      "learning_rate": 1.0344064137343187e-05,
      "loss": 0.4548,
      "step": 850
    },
    {
      "epoch": 0.5432451751250893,
      "grad_norm": 1.1319665908813477,
      "learning_rate": 1.0233100601632986e-05,
      "loss": 0.4744,
      "step": 855
    },
    {
      "epoch": 0.546422047494242,
      "grad_norm": 1.0778429508209229,
      "learning_rate": 1.0122108340541053e-05,
      "loss": 0.4674,
      "step": 860
    },
    {
      "epoch": 0.5495989198633945,
      "grad_norm": 1.0879789590835571,
      "learning_rate": 1.0011101031831604e-05,
      "loss": 0.4536,
      "step": 865
    },
    {
      "epoch": 0.552775792232547,
      "grad_norm": 1.0822393894195557,
      "learning_rate": 9.90009235512321e-06,
      "loss": 0.4662,
      "step": 870
    },
    {
      "epoch": 0.5559526646016997,
      "grad_norm": 1.0387858152389526,
      "learning_rate": 9.789095990203025e-06,
      "loss": 0.4438,
      "step": 875
    },
    {
      "epoch": 0.5591295369708522,
      "grad_norm": 1.1683969497680664,
      "learning_rate": 9.678125615340986e-06,
      "loss": 0.4834,
      "step": 880
    },
    {
      "epoch": 0.5623064093400048,
      "grad_norm": 1.0404331684112549,
      "learning_rate": 9.567194905604245e-06,
      "loss": 0.4651,
      "step": 885
    },
    {
      "epoch": 0.5654832817091573,
      "grad_norm": 0.9641637802124023,
      "learning_rate": 9.456317531171947e-06,
      "loss": 0.4808,
      "step": 890
    },
    {
      "epoch": 0.5686601540783099,
      "grad_norm": 0.9650170207023621,
      "learning_rate": 9.345507155650645e-06,
      "loss": 0.4903,
      "step": 895
    },
    {
      "epoch": 0.5718370264474625,
      "grad_norm": 1.0568928718566895,
      "learning_rate": 9.234777434390492e-06,
      "loss": 0.4553,
      "step": 900
    },
    {
      "epoch": 0.575013898816615,
      "grad_norm": 1.1297322511672974,
      "learning_rate": 9.12414201280248e-06,
      "loss": 0.4726,
      "step": 905
    },
    {
      "epoch": 0.5781907711857676,
      "grad_norm": 1.1371909379959106,
      "learning_rate": 9.013614524676907e-06,
      "loss": 0.4639,
      "step": 910
    },
    {
      "epoch": 0.5813676435549202,
      "grad_norm": 1.0769912004470825,
      "learning_rate": 8.90320859050323e-06,
      "loss": 0.4587,
      "step": 915
    },
    {
      "epoch": 0.5845445159240727,
      "grad_norm": 1.187941551208496,
      "learning_rate": 8.792937815791624e-06,
      "loss": 0.4551,
      "step": 920
    },
    {
      "epoch": 0.5877213882932253,
      "grad_norm": 1.0256364345550537,
      "learning_rate": 8.682815789396318e-06,
      "loss": 0.4703,
      "step": 925
    },
    {
      "epoch": 0.5908982606623779,
      "grad_norm": 1.176479458808899,
      "learning_rate": 8.57285608184104e-06,
      "loss": 0.471,
      "step": 930
    },
    {
      "epoch": 0.5940751330315305,
      "grad_norm": 0.9811561107635498,
      "learning_rate": 8.4630722436467e-06,
      "loss": 0.4924,
      "step": 935
    },
    {
      "epoch": 0.597252005400683,
      "grad_norm": 1.0429517030715942,
      "learning_rate": 8.353477803661526e-06,
      "loss": 0.4565,
      "step": 940
    },
    {
      "epoch": 0.6004288777698356,
      "grad_norm": 1.0858372449874878,
      "learning_rate": 8.24408626739387e-06,
      "loss": 0.4519,
      "step": 945
    },
    {
      "epoch": 0.6036057501389882,
      "grad_norm": 1.1124157905578613,
      "learning_rate": 8.134911115347934e-06,
      "loss": 0.4947,
      "step": 950
    },
    {
      "epoch": 0.6067826225081407,
      "grad_norm": 1.1018826961517334,
      "learning_rate": 8.02596580136252e-06,
      "loss": 0.4721,
      "step": 955
    },
    {
      "epoch": 0.6099594948772933,
      "grad_norm": 1.2431491613388062,
      "learning_rate": 7.917263750953092e-06,
      "loss": 0.4765,
      "step": 960
    },
    {
      "epoch": 0.6131363672464458,
      "grad_norm": 1.1556727886199951,
      "learning_rate": 7.80881835965734e-06,
      "loss": 0.4622,
      "step": 965
    },
    {
      "epoch": 0.6163132396155985,
      "grad_norm": 1.1924182176589966,
      "learning_rate": 7.700642991384407e-06,
      "loss": 0.484,
      "step": 970
    },
    {
      "epoch": 0.619490111984751,
      "grad_norm": 1.068928599357605,
      "learning_rate": 7.592750976768048e-06,
      "loss": 0.4538,
      "step": 975
    },
    {
      "epoch": 0.6226669843539036,
      "grad_norm": 1.0944935083389282,
      "learning_rate": 7.485155611523869e-06,
      "loss": 0.4763,
      "step": 980
    },
    {
      "epoch": 0.6258438567230562,
      "grad_norm": 0.954535961151123,
      "learning_rate": 7.377870154810869e-06,
      "loss": 0.4719,
      "step": 985
    },
    {
      "epoch": 0.6290207290922087,
      "grad_norm": 1.132608413696289,
      "learning_rate": 7.270907827597487e-06,
      "loss": 0.4548,
      "step": 990
    },
    {
      "epoch": 0.6321976014613613,
      "grad_norm": 1.0536706447601318,
      "learning_rate": 7.16428181103238e-06,
      "loss": 0.4558,
      "step": 995
    },
    {
      "epoch": 0.6353744738305138,
      "grad_norm": 1.074761986732483,
      "learning_rate": 7.058005244820068e-06,
      "loss": 0.4838,
      "step": 1000
    },
    {
      "epoch": 0.6385513461996665,
      "grad_norm": 1.0573989152908325,
      "learning_rate": 6.952091225601713e-06,
      "loss": 0.4491,
      "step": 1005
    },
    {
      "epoch": 0.641728218568819,
      "grad_norm": 1.0783993005752563,
      "learning_rate": 6.846552805341194e-06,
      "loss": 0.4525,
      "step": 1010
    },
    {
      "epoch": 0.6449050909379715,
      "grad_norm": 1.0637626647949219,
      "learning_rate": 6.7414029897167e-06,
      "loss": 0.4771,
      "step": 1015
    },
    {
      "epoch": 0.6480819633071241,
      "grad_norm": 1.1191637516021729,
      "learning_rate": 6.636654736518007e-06,
      "loss": 0.4574,
      "step": 1020
    },
    {
      "epoch": 0.6512588356762767,
      "grad_norm": 1.099088430404663,
      "learning_rate": 6.532320954049682e-06,
      "loss": 0.4604,
      "step": 1025
    },
    {
      "epoch": 0.6544357080454293,
      "grad_norm": 1.058421015739441,
      "learning_rate": 6.4284144995403565e-06,
      "loss": 0.4469,
      "step": 1030
    },
    {
      "epoch": 0.6576125804145818,
      "grad_norm": 1.0653492212295532,
      "learning_rate": 6.324948177558307e-06,
      "loss": 0.4554,
      "step": 1035
    },
    {
      "epoch": 0.6607894527837345,
      "grad_norm": 1.049944519996643,
      "learning_rate": 6.2219347384335505e-06,
      "loss": 0.4835,
      "step": 1040
    },
    {
      "epoch": 0.663966325152887,
      "grad_norm": 1.0621883869171143,
      "learning_rate": 6.119386876686571e-06,
      "loss": 0.463,
      "step": 1045
    },
    {
      "epoch": 0.6671431975220395,
      "grad_norm": 1.0801547765731812,
      "learning_rate": 6.017317229463968e-06,
      "loss": 0.4681,
      "step": 1050
    },
    {
      "epoch": 0.6703200698911921,
      "grad_norm": 1.023689866065979,
      "learning_rate": 5.91573837498115e-06,
      "loss": 0.4587,
      "step": 1055
    },
    {
      "epoch": 0.6734969422603447,
      "grad_norm": 1.0890988111495972,
      "learning_rate": 5.8146628309723155e-06,
      "loss": 0.4363,
      "step": 1060
    },
    {
      "epoch": 0.6766738146294973,
      "grad_norm": 1.0633463859558105,
      "learning_rate": 5.714103053147852e-06,
      "loss": 0.4443,
      "step": 1065
    },
    {
      "epoch": 0.6798506869986498,
      "grad_norm": 1.1121097803115845,
      "learning_rate": 5.6140714336594086e-06,
      "loss": 0.4563,
      "step": 1070
    },
    {
      "epoch": 0.6830275593678024,
      "grad_norm": 1.051448106765747,
      "learning_rate": 5.514580299572801e-06,
      "loss": 0.4478,
      "step": 1075
    },
    {
      "epoch": 0.686204431736955,
      "grad_norm": 1.077275037765503,
      "learning_rate": 5.415641911348893e-06,
      "loss": 0.4798,
      "step": 1080
    },
    {
      "epoch": 0.6893813041061075,
      "grad_norm": 1.1081876754760742,
      "learning_rate": 5.31726846133275e-06,
      "loss": 0.4511,
      "step": 1085
    },
    {
      "epoch": 0.6925581764752601,
      "grad_norm": 1.0196533203125,
      "learning_rate": 5.219472072251154e-06,
      "loss": 0.4598,
      "step": 1090
    },
    {
      "epoch": 0.6957350488444127,
      "grad_norm": 1.0347778797149658,
      "learning_rate": 5.12226479571868e-06,
      "loss": 0.4458,
      "step": 1095
    },
    {
      "epoch": 0.6989119212135653,
      "grad_norm": 1.0776883363723755,
      "learning_rate": 5.025658610752568e-06,
      "loss": 0.4322,
      "step": 1100
    },
    {
      "epoch": 0.7020887935827178,
      "grad_norm": 1.0258809328079224,
      "learning_rate": 4.929665422296532e-06,
      "loss": 0.4376,
      "step": 1105
    },
    {
      "epoch": 0.7052656659518703,
      "grad_norm": 1.0535441637039185,
      "learning_rate": 4.834297059753682e-06,
      "loss": 0.4496,
      "step": 1110
    },
    {
      "epoch": 0.708442538321023,
      "grad_norm": 1.0777946710586548,
      "learning_rate": 4.739565275528773e-06,
      "loss": 0.455,
      "step": 1115
    },
    {
      "epoch": 0.7116194106901755,
      "grad_norm": 1.0707253217697144,
      "learning_rate": 4.645481743579949e-06,
      "loss": 0.4795,
      "step": 1120
    },
    {
      "epoch": 0.7147962830593281,
      "grad_norm": 0.963762640953064,
      "learning_rate": 4.55205805798011e-06,
      "loss": 0.4429,
      "step": 1125
    },
    {
      "epoch": 0.7179731554284806,
      "grad_norm": 1.0962485074996948,
      "learning_rate": 4.45930573148818e-06,
      "loss": 0.494,
      "step": 1130
    },
    {
      "epoch": 0.7211500277976333,
      "grad_norm": 1.0998930931091309,
      "learning_rate": 4.367236194130375e-06,
      "loss": 0.4401,
      "step": 1135
    },
    {
      "epoch": 0.7243269001667858,
      "grad_norm": 0.9984025955200195,
      "learning_rate": 4.275860791791638e-06,
      "loss": 0.4433,
      "step": 1140
    },
    {
      "epoch": 0.7275037725359383,
      "grad_norm": 1.0686578750610352,
      "learning_rate": 4.185190784817478e-06,
      "loss": 0.4553,
      "step": 1145
    },
    {
      "epoch": 0.730680644905091,
      "grad_norm": 1.015057921409607,
      "learning_rate": 4.095237346626345e-06,
      "loss": 0.4366,
      "step": 1150
    },
    {
      "epoch": 0.7338575172742435,
      "grad_norm": 1.0694704055786133,
      "learning_rate": 4.006011562332702e-06,
      "loss": 0.4503,
      "step": 1155
    },
    {
      "epoch": 0.7370343896433961,
      "grad_norm": 1.125226616859436,
      "learning_rate": 3.917524427380992e-06,
      "loss": 0.4532,
      "step": 1160
    },
    {
      "epoch": 0.7402112620125486,
      "grad_norm": 1.087633490562439,
      "learning_rate": 3.829786846190648e-06,
      "loss": 0.456,
      "step": 1165
    },
    {
      "epoch": 0.7433881343817013,
      "grad_norm": 1.0903210639953613,
      "learning_rate": 3.742809630812322e-06,
      "loss": 0.4406,
      "step": 1170
    },
    {
      "epoch": 0.7465650067508538,
      "grad_norm": 1.1029974222183228,
      "learning_rate": 3.6566034995955e-06,
      "loss": 0.4549,
      "step": 1175
    },
    {
      "epoch": 0.7497418791200063,
      "grad_norm": 1.0449450016021729,
      "learning_rate": 3.571179075867671e-06,
      "loss": 0.4613,
      "step": 1180
    },
    {
      "epoch": 0.7529187514891589,
      "grad_norm": 1.0384024381637573,
      "learning_rate": 3.4865468866251794e-06,
      "loss": 0.464,
      "step": 1185
    },
    {
      "epoch": 0.7560956238583115,
      "grad_norm": 1.0278702974319458,
      "learning_rate": 3.402717361235961e-06,
      "loss": 0.4568,
      "step": 1190
    },
    {
      "epoch": 0.7592724962274641,
      "grad_norm": 1.0673890113830566,
      "learning_rate": 3.3197008301543497e-06,
      "loss": 0.4594,
      "step": 1195
    },
    {
      "epoch": 0.7624493685966166,
      "grad_norm": 1.0114554166793823,
      "learning_rate": 3.2375075236480003e-06,
      "loss": 0.4482,
      "step": 1200
    },
    {
      "epoch": 0.7656262409657691,
      "grad_norm": 1.101015329360962,
      "learning_rate": 3.156147570537209e-06,
      "loss": 0.4596,
      "step": 1205
    },
    {
      "epoch": 0.7688031133349218,
      "grad_norm": 1.1032803058624268,
      "learning_rate": 3.075630996946729e-06,
      "loss": 0.4442,
      "step": 1210
    },
    {
      "epoch": 0.7719799857040743,
      "grad_norm": 1.0375980138778687,
      "learning_rate": 2.9959677250702223e-06,
      "loss": 0.4511,
      "step": 1215
    },
    {
      "epoch": 0.7751568580732269,
      "grad_norm": 1.0073593854904175,
      "learning_rate": 2.9171675719475355e-06,
      "loss": 0.4384,
      "step": 1220
    },
    {
      "epoch": 0.7783337304423795,
      "grad_norm": 1.045289158821106,
      "learning_rate": 2.8392402482549397e-06,
      "loss": 0.4719,
      "step": 1225
    },
    {
      "epoch": 0.7815106028115321,
      "grad_norm": 0.984451949596405,
      "learning_rate": 2.762195357108448e-06,
      "loss": 0.4386,
      "step": 1230
    },
    {
      "epoch": 0.7846874751806846,
      "grad_norm": 1.0313149690628052,
      "learning_rate": 2.6860423928804135e-06,
      "loss": 0.4381,
      "step": 1235
    },
    {
      "epoch": 0.7878643475498371,
      "grad_norm": 1.0065529346466064,
      "learning_rate": 2.6107907400295385e-06,
      "loss": 0.467,
      "step": 1240
    },
    {
      "epoch": 0.7910412199189898,
      "grad_norm": 1.0645593404769897,
      "learning_rate": 2.53644967194439e-06,
      "loss": 0.45,
      "step": 1245
    },
    {
      "epoch": 0.7942180922881423,
      "grad_norm": 0.991398811340332,
      "learning_rate": 2.4630283498006323e-06,
      "loss": 0.4583,
      "step": 1250
    },
    {
      "epoch": 0.7973949646572949,
      "grad_norm": 0.9970043897628784,
      "learning_rate": 2.390535821432084e-06,
      "loss": 0.4438,
      "step": 1255
    },
    {
      "epoch": 0.8005718370264474,
      "grad_norm": 1.015313744544983,
      "learning_rate": 2.3189810202157337e-06,
      "loss": 0.4797,
      "step": 1260
    },
    {
      "epoch": 0.8037487093956001,
      "grad_norm": 1.107275128364563,
      "learning_rate": 2.2483727639708606e-06,
      "loss": 0.4594,
      "step": 1265
    },
    {
      "epoch": 0.8069255817647526,
      "grad_norm": 1.0633456707000732,
      "learning_rate": 2.1787197538724147e-06,
      "loss": 0.4462,
      "step": 1270
    },
    {
      "epoch": 0.8101024541339051,
      "grad_norm": 1.0543006658554077,
      "learning_rate": 2.1100305733787406e-06,
      "loss": 0.4622,
      "step": 1275
    },
    {
      "epoch": 0.8132793265030578,
      "grad_norm": 1.0409566164016724,
      "learning_rate": 2.0423136871738227e-06,
      "loss": 0.4397,
      "step": 1280
    },
    {
      "epoch": 0.8164561988722103,
      "grad_norm": 1.0549768209457397,
      "learning_rate": 1.9755774401241866e-06,
      "loss": 0.4426,
      "step": 1285
    },
    {
      "epoch": 0.8196330712413629,
      "grad_norm": 1.0367543697357178,
      "learning_rate": 1.9098300562505266e-06,
      "loss": 0.4435,
      "step": 1290
    },
    {
      "epoch": 0.8228099436105154,
      "grad_norm": 1.0637731552124023,
      "learning_rate": 1.8450796377142566e-06,
      "loss": 0.4147,
      "step": 1295
    },
    {
      "epoch": 0.825986815979668,
      "grad_norm": 1.0210437774658203,
      "learning_rate": 1.781334163819064e-06,
      "loss": 0.479,
      "step": 1300
    },
    {
      "epoch": 0.8291636883488206,
      "grad_norm": 1.0538122653961182,
      "learning_rate": 1.718601490027606e-06,
      "loss": 0.4366,
      "step": 1305
    },
    {
      "epoch": 0.8323405607179731,
      "grad_norm": 1.0405868291854858,
      "learning_rate": 1.6568893469934666e-06,
      "loss": 0.4342,
      "step": 1310
    },
    {
      "epoch": 0.8355174330871257,
      "grad_norm": 1.0490772724151611,
      "learning_rate": 1.5962053396085075e-06,
      "loss": 0.4526,
      "step": 1315
    },
    {
      "epoch": 0.8386943054562783,
      "grad_norm": 1.0285438299179077,
      "learning_rate": 1.5365569460656793e-06,
      "loss": 0.4542,
      "step": 1320
    },
    {
      "epoch": 0.8418711778254309,
      "grad_norm": 1.1192095279693604,
      "learning_rate": 1.4779515169374914e-06,
      "loss": 0.4391,
      "step": 1325
    },
    {
      "epoch": 0.8450480501945834,
      "grad_norm": 0.988335371017456,
      "learning_rate": 1.4203962742701893e-06,
      "loss": 0.4377,
      "step": 1330
    },
    {
      "epoch": 0.848224922563736,
      "grad_norm": 1.085839033126831,
      "learning_rate": 1.3638983106937543e-06,
      "loss": 0.4477,
      "step": 1335
    },
    {
      "epoch": 0.8514017949328886,
      "grad_norm": 0.9356504678726196,
      "learning_rate": 1.3084645885478797e-06,
      "loss": 0.469,
      "step": 1340
    },
    {
      "epoch": 0.8545786673020411,
      "grad_norm": 1.0390148162841797,
      "learning_rate": 1.254101939023985e-06,
      "loss": 0.4328,
      "step": 1345
    },
    {
      "epoch": 0.8577555396711937,
      "grad_norm": 1.0623959302902222,
      "learning_rate": 1.2008170613233971e-06,
      "loss": 0.4422,
      "step": 1350
    },
    {
      "epoch": 0.8609324120403463,
      "grad_norm": 1.1521815061569214,
      "learning_rate": 1.1486165218317957e-06,
      "loss": 0.4512,
      "step": 1355
    },
    {
      "epoch": 0.8641092844094989,
      "grad_norm": 1.0439189672470093,
      "learning_rate": 1.0975067533100337e-06,
      "loss": 0.4304,
      "step": 1360
    },
    {
      "epoch": 0.8672861567786514,
      "grad_norm": 0.9963186383247375,
      "learning_rate": 1.0474940541014e-06,
      "loss": 0.4328,
      "step": 1365
    },
    {
      "epoch": 0.8704630291478039,
      "grad_norm": 1.0758447647094727,
      "learning_rate": 9.9858458735548e-07,
      "loss": 0.4542,
      "step": 1370
    },
    {
      "epoch": 0.8736399015169566,
      "grad_norm": 1.106645941734314,
      "learning_rate": 9.507843802686623e-07,
      "loss": 0.448,
      "step": 1375
    },
    {
      "epoch": 0.8768167738861091,
      "grad_norm": 1.107141137123108,
      "learning_rate": 9.040993233413787e-07,
      "loss": 0.4333,
      "step": 1380
    },
    {
      "epoch": 0.8799936462552617,
      "grad_norm": 1.0433176755905151,
      "learning_rate": 8.585351696522248e-07,
      "loss": 0.4368,
      "step": 1385
    },
    {
      "epoch": 0.8831705186244143,
      "grad_norm": 1.0009626150131226,
      "learning_rate": 8.140975341489921e-07,
      "loss": 0.4386,
      "step": 1390
    },
    {
      "epoch": 0.8863473909935669,
      "grad_norm": 1.0583575963974,
      "learning_rate": 7.707918929567282e-07,
      "loss": 0.4414,
      "step": 1395
    },
    {
      "epoch": 0.8895242633627194,
      "grad_norm": 1.043960452079773,
      "learning_rate": 7.286235827029042e-07,
      "loss": 0.4434,
      "step": 1400
    },
    {
      "epoch": 0.8927011357318719,
      "grad_norm": 0.9781895875930786,
      "learning_rate": 6.875977998597828e-07,
      "loss": 0.4473,
      "step": 1405
    },
    {
      "epoch": 0.8958780081010246,
      "grad_norm": 0.9682676792144775,
      "learning_rate": 6.477196001040254e-07,
      "loss": 0.4325,
      "step": 1410
    },
    {
      "epoch": 0.8990548804701771,
      "grad_norm": 0.9957118630409241,
      "learning_rate": 6.089938976936971e-07,
      "loss": 0.415,
      "step": 1415
    },
    {
      "epoch": 0.9022317528393297,
      "grad_norm": 1.020727276802063,
      "learning_rate": 5.714254648626639e-07,
      "loss": 0.4353,
      "step": 1420
    },
    {
      "epoch": 0.9054086252084822,
      "grad_norm": 1.0520106554031372,
      "learning_rate": 5.350189312324993e-07,
      "loss": 0.4346,
      "step": 1425
    },
    {
      "epoch": 0.9085854975776348,
      "grad_norm": 1.0706285238265991,
      "learning_rate": 4.997787832419699e-07,
      "loss": 0.4297,
      "step": 1430
    },
    {
      "epoch": 0.9117623699467874,
      "grad_norm": 1.1210540533065796,
      "learning_rate": 4.657093635941701e-07,
      "loss": 0.4568,
      "step": 1435
    },
    {
      "epoch": 0.9149392423159399,
      "grad_norm": 1.1412665843963623,
      "learning_rate": 4.328148707213564e-07,
      "loss": 0.454,
      "step": 1440
    },
    {
      "epoch": 0.9181161146850926,
      "grad_norm": 1.0113401412963867,
      "learning_rate": 4.010993582675693e-07,
      "loss": 0.4604,
      "step": 1445
    },
    {
      "epoch": 0.9212929870542451,
      "grad_norm": 0.9700157642364502,
      "learning_rate": 3.7056673458909953e-07,
      "loss": 0.4486,
      "step": 1450
    },
    {
      "epoch": 0.9244698594233977,
      "grad_norm": 0.950957179069519,
      "learning_rate": 3.412207622728458e-07,
      "loss": 0.4198,
      "step": 1455
    },
    {
      "epoch": 0.9276467317925502,
      "grad_norm": 0.957328200340271,
      "learning_rate": 3.130650576726557e-07,
      "loss": 0.4155,
      "step": 1460
    },
    {
      "epoch": 0.9308236041617028,
      "grad_norm": 1.1100578308105469,
      "learning_rate": 2.861030904636708e-07,
      "loss": 0.4339,
      "step": 1465
    },
    {
      "epoch": 0.9340004765308554,
      "grad_norm": 1.0033955574035645,
      "learning_rate": 2.603381832147522e-07,
      "loss": 0.4551,
      "step": 1470
    },
    {
      "epoch": 0.9371773489000079,
      "grad_norm": 1.0719983577728271,
      "learning_rate": 2.3577351097903157e-07,
      "loss": 0.4368,
      "step": 1475
    },
    {
      "epoch": 0.9403542212691605,
      "grad_norm": 1.0672398805618286,
      "learning_rate": 2.1241210090265697e-07,
      "loss": 0.4383,
      "step": 1480
    },
    {
      "epoch": 0.9435310936383131,
      "grad_norm": 1.0421243906021118,
      "learning_rate": 1.9025683185173727e-07,
      "loss": 0.4509,
      "step": 1485
    },
    {
      "epoch": 0.9467079660074657,
      "grad_norm": 1.0599042177200317,
      "learning_rate": 1.6931043405758128e-07,
      "loss": 0.438,
      "step": 1490
    },
    {
      "epoch": 0.9498848383766182,
      "grad_norm": 0.9902300834655762,
      "learning_rate": 1.4957548878025029e-07,
      "loss": 0.4637,
      "step": 1495
    },
    {
      "epoch": 0.9530617107457708,
      "grad_norm": 0.9928290843963623,
      "learning_rate": 1.3105442799045576e-07,
      "loss": 0.4422,
      "step": 1500
    },
    {
      "epoch": 0.9562385831149234,
      "grad_norm": 1.024328589439392,
      "learning_rate": 1.1374953406987244e-07,
      "loss": 0.4593,
      "step": 1505
    },
    {
      "epoch": 0.9594154554840759,
      "grad_norm": 0.9927188158035278,
      "learning_rate": 9.766293952987449e-08,
      "loss": 0.445,
      "step": 1510
    },
    {
      "epoch": 0.9625923278532285,
      "grad_norm": 1.156173586845398,
      "learning_rate": 8.279662674873679e-08,
      "loss": 0.429,
      "step": 1515
    },
    {
      "epoch": 0.9657692002223811,
      "grad_norm": 1.0015678405761719,
      "learning_rate": 6.915242772734809e-08,
      "loss": 0.4481,
      "step": 1520
    },
    {
      "epoch": 0.9689460725915336,
      "grad_norm": 0.9944911599159241,
      "learning_rate": 5.673202386345389e-08,
      "loss": 0.4597,
      "step": 1525
    },
    {
      "epoch": 0.9721229449606862,
      "grad_norm": 0.9489125609397888,
      "learning_rate": 4.553694574444656e-08,
      "loss": 0.4471,
      "step": 1530
    },
    {
      "epoch": 0.9752998173298387,
      "grad_norm": 1.0264545679092407,
      "learning_rate": 3.5568572958752935e-08,
      "loss": 0.4408,
      "step": 1535
    },
    {
      "epoch": 0.9784766896989914,
      "grad_norm": 1.0240846872329712,
      "learning_rate": 2.682813392582917e-08,
      "loss": 0.4575,
      "step": 1540
    },
    {
      "epoch": 0.9816535620681439,
      "grad_norm": 0.995697021484375,
      "learning_rate": 1.9316705744769626e-08,
      "loss": 0.4261,
      "step": 1545
    },
    {
      "epoch": 0.9848304344372965,
      "grad_norm": 1.057746171951294,
      "learning_rate": 1.3035214061586365e-08,
      "loss": 0.4561,
      "step": 1550
    },
    {
      "epoch": 0.9880073068064491,
      "grad_norm": 1.13213312625885,
      "learning_rate": 7.984432955133736e-09,
      "loss": 0.4321,
      "step": 1555
    },
    {
      "epoch": 0.9911841791756016,
      "grad_norm": 1.0002557039260864,
      "learning_rate": 4.164984841715791e-09,
      "loss": 0.4579,
      "step": 1560
    },
    {
      "epoch": 0.9943610515447542,
      "grad_norm": 1.1303766965866089,
      "learning_rate": 1.5773403983909697e-09,
      "loss": 0.4427,
      "step": 1565
    },
    {
      "epoch": 0.9975379239139067,
      "grad_norm": 1.0552936792373657,
      "learning_rate": 2.218185049629451e-10,
      "loss": 0.437,
      "step": 1570
    },
    {
      "epoch": 0.9994440473353983,
      "step": 1573,
      "total_flos": 4.7290819683968614e+17,
      "train_loss": 0.5221994996298216,
      "train_runtime": 21044.2981,
      "train_samples_per_second": 1.795,
      "train_steps_per_second": 0.075
    }
  ],
  "logging_steps": 5,
  "max_steps": 1573,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": false,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 4.7290819683968614e+17,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}