{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.15292113438220262,
  "eval_steps": 500,
  "global_step": 6104,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.000776630924942379,
      "grad_norm": 23.55321502685547,
      "learning_rate": 1.0157273918741808e-06,
      "loss": 8.3443,
      "step": 31
    },
    {
      "epoch": 0.001553261849884758,
      "grad_norm": 14.414477348327637,
      "learning_rate": 2.0314547837483616e-06,
      "loss": 7.198,
      "step": 62
    },
    {
      "epoch": 0.002329892774827137,
      "grad_norm": 10.739773750305176,
      "learning_rate": 3.0471821756225426e-06,
      "loss": 5.8639,
      "step": 93
    },
    {
      "epoch": 0.003106523699769516,
      "grad_norm": 5.378795623779297,
      "learning_rate": 4.062909567496723e-06,
      "loss": 4.8222,
      "step": 124
    },
    {
      "epoch": 0.003883154624711895,
      "grad_norm": 8.51831340789795,
      "learning_rate": 5.078636959370905e-06,
      "loss": 4.1983,
      "step": 155
    },
    {
      "epoch": 0.004659785549654274,
      "grad_norm": 6.32632303237915,
      "learning_rate": 6.094364351245085e-06,
      "loss": 3.8598,
      "step": 186
    },
    {
      "epoch": 0.005436416474596653,
      "grad_norm": 5.590794563293457,
      "learning_rate": 7.110091743119267e-06,
      "loss": 3.6646,
      "step": 217
    },
    {
      "epoch": 0.006213047399539032,
      "grad_norm": 5.9275383949279785,
      "learning_rate": 8.125819134993446e-06,
      "loss": 3.5212,
      "step": 248
    },
    {
      "epoch": 0.006989678324481411,
      "grad_norm": 5.041291236877441,
      "learning_rate": 9.141546526867629e-06,
      "loss": 3.3936,
      "step": 279
    },
    {
      "epoch": 0.00776630924942379,
      "grad_norm": 4.923459529876709,
      "learning_rate": 1.015727391874181e-05,
      "loss": 3.2713,
      "step": 310
    },
    {
      "epoch": 0.00854294017436617,
      "grad_norm": 8.011459350585938,
      "learning_rate": 1.117300131061599e-05,
      "loss": 3.2,
      "step": 341
    },
    {
      "epoch": 0.009319571099308548,
      "grad_norm": 5.261505603790283,
      "learning_rate": 1.218872870249017e-05,
      "loss": 3.1255,
      "step": 372
    },
    {
      "epoch": 0.010096202024250926,
      "grad_norm": 5.004406452178955,
      "learning_rate": 1.3204456094364351e-05,
      "loss": 3.0726,
      "step": 403
    },
    {
      "epoch": 0.010872832949193306,
      "grad_norm": 4.326263427734375,
      "learning_rate": 1.4220183486238533e-05,
      "loss": 2.9682,
      "step": 434
    },
    {
      "epoch": 0.011649463874135685,
      "grad_norm": 4.251937389373779,
      "learning_rate": 1.5235910878112714e-05,
      "loss": 2.9215,
      "step": 465
    },
    {
      "epoch": 0.012426094799078065,
      "grad_norm": 4.35053825378418,
      "learning_rate": 1.6251638269986893e-05,
      "loss": 2.8902,
      "step": 496
    },
    {
      "epoch": 0.013202725724020443,
      "grad_norm": 4.316098690032959,
      "learning_rate": 1.7267365661861077e-05,
      "loss": 2.8521,
      "step": 527
    },
    {
      "epoch": 0.013979356648962823,
      "grad_norm": 5.536881446838379,
      "learning_rate": 1.8283093053735257e-05,
      "loss": 2.8058,
      "step": 558
    },
    {
      "epoch": 0.014755987573905201,
      "grad_norm": 3.759843111038208,
      "learning_rate": 1.9298820445609438e-05,
      "loss": 2.7438,
      "step": 589
    },
    {
      "epoch": 0.01553261849884758,
      "grad_norm": 4.057703018188477,
      "learning_rate": 2.031454783748362e-05,
      "loss": 2.7508,
      "step": 620
    },
    {
      "epoch": 0.01630924942378996,
      "grad_norm": 3.427708625793457,
      "learning_rate": 2.13302752293578e-05,
      "loss": 2.7046,
      "step": 651
    },
    {
      "epoch": 0.01708588034873234,
      "grad_norm": 3.9012484550476074,
      "learning_rate": 2.234600262123198e-05,
      "loss": 2.6706,
      "step": 682
    },
    {
      "epoch": 0.017862511273674716,
      "grad_norm": 3.388504981994629,
      "learning_rate": 2.336173001310616e-05,
      "loss": 2.6377,
      "step": 713
    },
    {
      "epoch": 0.018639142198617096,
      "grad_norm": 3.2011327743530273,
      "learning_rate": 2.437745740498034e-05,
      "loss": 2.6214,
      "step": 744
    },
    {
      "epoch": 0.019415773123559476,
      "grad_norm": 3.5897209644317627,
      "learning_rate": 2.5393184796854525e-05,
      "loss": 2.6045,
      "step": 775
    },
    {
      "epoch": 0.020192404048501852,
      "grad_norm": 3.386697769165039,
      "learning_rate": 2.6408912188728702e-05,
      "loss": 2.6066,
      "step": 806
    },
    {
      "epoch": 0.020969034973444232,
      "grad_norm": 2.9135866165161133,
      "learning_rate": 2.7424639580602886e-05,
      "loss": 2.5697,
      "step": 837
    },
    {
      "epoch": 0.021745665898386612,
      "grad_norm": 9.212065696716309,
      "learning_rate": 2.8440366972477066e-05,
      "loss": 2.5773,
      "step": 868
    },
    {
      "epoch": 0.022522296823328992,
      "grad_norm": 3.0339765548706055,
      "learning_rate": 2.9456094364351244e-05,
      "loss": 2.5453,
      "step": 899
    },
    {
      "epoch": 0.02329892774827137,
      "grad_norm": 2.8007607460021973,
      "learning_rate": 3.0471821756225428e-05,
      "loss": 2.5297,
      "step": 930
    },
    {
      "epoch": 0.02407555867321375,
      "grad_norm": 3.251768112182617,
      "learning_rate": 3.148754914809961e-05,
      "loss": 2.5273,
      "step": 961
    },
    {
      "epoch": 0.02485218959815613,
      "grad_norm": 3.357011079788208,
      "learning_rate": 3.2503276539973785e-05,
      "loss": 2.4896,
      "step": 992
    },
    {
      "epoch": 0.025628820523098506,
      "grad_norm": 2.6069107055664062,
      "learning_rate": 3.351900393184797e-05,
      "loss": 2.4718,
      "step": 1023
    },
    {
      "epoch": 0.026405451448040886,
      "grad_norm": 4.663620948791504,
      "learning_rate": 3.453473132372215e-05,
      "loss": 2.4338,
      "step": 1054
    },
    {
      "epoch": 0.027182082372983266,
      "grad_norm": 3.819638252258301,
      "learning_rate": 3.555045871559633e-05,
      "loss": 2.4848,
      "step": 1085
    },
    {
      "epoch": 0.027958713297925646,
      "grad_norm": 2.6367456912994385,
      "learning_rate": 3.6566186107470514e-05,
      "loss": 2.4415,
      "step": 1116
    },
    {
      "epoch": 0.028735344222868022,
      "grad_norm": 2.6118173599243164,
      "learning_rate": 3.7581913499344695e-05,
      "loss": 2.4341,
      "step": 1147
    },
    {
      "epoch": 0.029511975147810402,
      "grad_norm": 9.820366859436035,
      "learning_rate": 3.8597640891218876e-05,
      "loss": 2.3773,
      "step": 1178
    },
    {
      "epoch": 0.030288606072752782,
      "grad_norm": 38.890506744384766,
      "learning_rate": 3.9613368283093056e-05,
      "loss": 2.3966,
      "step": 1209
    },
    {
      "epoch": 0.03106523699769516,
      "grad_norm": 2.5037872791290283,
      "learning_rate": 4.062909567496724e-05,
      "loss": 2.3421,
      "step": 1240
    },
    {
      "epoch": 0.03184186792263754,
      "grad_norm": 2.426544189453125,
      "learning_rate": 4.164482306684142e-05,
      "loss": 2.3568,
      "step": 1271
    },
    {
      "epoch": 0.03261849884757992,
      "grad_norm": 2.5375614166259766,
      "learning_rate": 4.26605504587156e-05,
      "loss": 2.3314,
      "step": 1302
    },
    {
      "epoch": 0.0333951297725223,
      "grad_norm": 3.555330276489258,
      "learning_rate": 4.367627785058978e-05,
      "loss": 2.3262,
      "step": 1333
    },
    {
      "epoch": 0.03417176069746468,
      "grad_norm": 2.2571263313293457,
      "learning_rate": 4.469200524246396e-05,
      "loss": 2.3167,
      "step": 1364
    },
    {
      "epoch": 0.03494839162240705,
      "grad_norm": 2.184305429458618,
      "learning_rate": 4.570773263433814e-05,
      "loss": 2.2936,
      "step": 1395
    },
    {
      "epoch": 0.03572502254734943,
      "grad_norm": 2.8295061588287354,
      "learning_rate": 4.672346002621232e-05,
      "loss": 2.3033,
      "step": 1426
    },
    {
      "epoch": 0.03650165347229181,
      "grad_norm": 2.0241498947143555,
      "learning_rate": 4.77391874180865e-05,
      "loss": 2.2833,
      "step": 1457
    },
    {
      "epoch": 0.03727828439723419,
      "grad_norm": 2.0029919147491455,
      "learning_rate": 4.875491480996068e-05,
      "loss": 2.2663,
      "step": 1488
    },
    {
      "epoch": 0.03805491532217657,
      "grad_norm": 2.033773899078369,
      "learning_rate": 4.977064220183487e-05,
      "loss": 2.2597,
      "step": 1519
    },
    {
      "epoch": 0.03883154624711895,
      "grad_norm": 2.047210216522217,
      "learning_rate": 4.9999915451558777e-05,
      "loss": 2.2231,
      "step": 1550
    },
    {
      "epoch": 0.03960817717206133,
      "grad_norm": 2.032099485397339,
      "learning_rate": 4.999955597496219e-05,
      "loss": 2.2411,
      "step": 1581
    },
    {
      "epoch": 0.040384808097003705,
      "grad_norm": 1.8384301662445068,
      "learning_rate": 4.9998914381774255e-05,
      "loss": 2.2409,
      "step": 1612
    },
    {
      "epoch": 0.041161439021946085,
      "grad_norm": 1.721187710762024,
      "learning_rate": 4.999799067923527e-05,
      "loss": 2.193,
      "step": 1643
    },
    {
      "epoch": 0.041938069946888465,
      "grad_norm": 1.8869743347167969,
      "learning_rate": 4.999678487776908e-05,
      "loss": 2.1917,
      "step": 1674
    },
    {
      "epoch": 0.042714700871830845,
      "grad_norm": 1.796583652496338,
      "learning_rate": 4.9995296990983006e-05,
      "loss": 2.1794,
      "step": 1705
    },
    {
      "epoch": 0.043491331796773225,
      "grad_norm": 1.6308804750442505,
      "learning_rate": 4.999352703566763e-05,
      "loss": 2.1854,
      "step": 1736
    },
    {
      "epoch": 0.044267962721715605,
      "grad_norm": 1.683474063873291,
      "learning_rate": 4.999147503179668e-05,
      "loss": 2.1797,
      "step": 1767
    },
    {
      "epoch": 0.045044593646657985,
      "grad_norm": 1.7442835569381714,
      "learning_rate": 4.998914100252672e-05,
      "loss": 2.1695,
      "step": 1798
    },
    {
      "epoch": 0.04582122457160036,
      "grad_norm": 1.704970359802246,
      "learning_rate": 4.998652497419696e-05,
      "loss": 2.1365,
      "step": 1829
    },
    {
      "epoch": 0.04659785549654274,
      "grad_norm": 1.7213023900985718,
      "learning_rate": 4.9983626976328927e-05,
      "loss": 2.1524,
      "step": 1860
    },
    {
      "epoch": 0.04737448642148512,
      "grad_norm": 4.01851224899292,
      "learning_rate": 4.998044704162613e-05,
      "loss": 2.1171,
      "step": 1891
    },
    {
      "epoch": 0.0481511173464275,
      "grad_norm": 1.5619858503341675,
      "learning_rate": 4.9976985205973705e-05,
      "loss": 2.1608,
      "step": 1922
    },
    {
      "epoch": 0.04892774827136988,
      "grad_norm": 1.684493064880371,
      "learning_rate": 4.997324150843799e-05,
      "loss": 2.1278,
      "step": 1953
    },
    {
      "epoch": 0.04970437919631226,
      "grad_norm": 1.660992980003357,
      "learning_rate": 4.99692159912661e-05,
      "loss": 2.1136,
      "step": 1984
    },
    {
      "epoch": 0.05048101012125464,
      "grad_norm": 1.5491065979003906,
      "learning_rate": 4.996490869988546e-05,
      "loss": 2.1086,
      "step": 2015
    },
    {
      "epoch": 0.05125764104619701,
      "grad_norm": 1.5400840044021606,
      "learning_rate": 4.996031968290326e-05,
      "loss": 2.1069,
      "step": 2046
    },
    {
      "epoch": 0.05203427197113939,
      "grad_norm": 1.5687416791915894,
      "learning_rate": 4.995544899210594e-05,
      "loss": 2.0888,
      "step": 2077
    },
    {
      "epoch": 0.05281090289608177,
      "grad_norm": 1.5218935012817383,
      "learning_rate": 4.9950296682458583e-05,
      "loss": 2.1032,
      "step": 2108
    },
    {
      "epoch": 0.05358753382102415,
      "grad_norm": 1.763666033744812,
      "learning_rate": 4.994486281210429e-05,
      "loss": 2.0799,
      "step": 2139
    },
    {
      "epoch": 0.05436416474596653,
      "grad_norm": 1.5606274604797363,
      "learning_rate": 4.9939147442363566e-05,
      "loss": 2.0614,
      "step": 2170
    },
    {
      "epoch": 0.05514079567090891,
      "grad_norm": 1.4900213479995728,
      "learning_rate": 4.9933150637733574e-05,
      "loss": 2.0565,
      "step": 2201
    },
    {
      "epoch": 0.05591742659585129,
      "grad_norm": 1.4454214572906494,
      "learning_rate": 4.992687246588743e-05,
      "loss": 2.0381,
      "step": 2232
    },
    {
      "epoch": 0.056694057520793664,
      "grad_norm": 1.4872742891311646,
      "learning_rate": 4.992031299767347e-05,
      "loss": 2.0227,
      "step": 2263
    },
    {
      "epoch": 0.057470688445736044,
      "grad_norm": 1.4538527727127075,
      "learning_rate": 4.9913472307114386e-05,
      "loss": 2.0395,
      "step": 2294
    },
    {
      "epoch": 0.058247319370678424,
      "grad_norm": 1.432769536972046,
      "learning_rate": 4.9906350471406446e-05,
      "loss": 2.0352,
      "step": 2325
    },
    {
      "epoch": 0.059023950295620804,
      "grad_norm": 1.4535977840423584,
      "learning_rate": 4.989894757091861e-05,
      "loss": 2.0341,
      "step": 2356
    },
    {
      "epoch": 0.059800581220563184,
      "grad_norm": 1.4004398584365845,
      "learning_rate": 4.989126368919158e-05,
      "loss": 2.0185,
      "step": 2387
    },
    {
      "epoch": 0.060577212145505564,
      "grad_norm": 1.433273434638977,
      "learning_rate": 4.988329891293693e-05,
      "loss": 2.036,
      "step": 2418
    },
    {
      "epoch": 0.06135384307044794,
      "grad_norm": 1.3791290521621704,
      "learning_rate": 4.987505333203608e-05,
      "loss": 2.0421,
      "step": 2449
    },
    {
      "epoch": 0.06213047399539032,
      "grad_norm": 2.008694648742676,
      "learning_rate": 4.9866527039539276e-05,
      "loss": 1.9948,
      "step": 2480
    },
    {
      "epoch": 0.0629071049203327,
      "grad_norm": 1.3773164749145508,
      "learning_rate": 4.9857720131664594e-05,
      "loss": 2.0158,
      "step": 2511
    },
    {
      "epoch": 0.06368373584527508,
      "grad_norm": 1.4489777088165283,
      "learning_rate": 4.9848632707796773e-05,
      "loss": 2.0216,
      "step": 2542
    },
    {
      "epoch": 0.06446036677021745,
      "grad_norm": 2.1266908645629883,
      "learning_rate": 4.9839264870486155e-05,
      "loss": 1.9962,
      "step": 2573
    },
    {
      "epoch": 0.06523699769515984,
      "grad_norm": 1.3704332113265991,
      "learning_rate": 4.9829616725447526e-05,
      "loss": 1.9801,
      "step": 2604
    },
    {
      "epoch": 0.06601362862010221,
      "grad_norm": 1.3643279075622559,
      "learning_rate": 4.981968838155888e-05,
      "loss": 1.9902,
      "step": 2635
    },
    {
      "epoch": 0.0667902595450446,
      "grad_norm": 1.3495409488677979,
      "learning_rate": 4.980947995086024e-05,
      "loss": 1.993,
      "step": 2666
    },
    {
      "epoch": 0.06756689046998697,
      "grad_norm": 1.3918874263763428,
      "learning_rate": 4.979899154855234e-05,
      "loss": 1.9782,
      "step": 2697
    },
    {
      "epoch": 0.06834352139492936,
      "grad_norm": 1.3205535411834717,
      "learning_rate": 4.9788223292995386e-05,
      "loss": 1.9638,
      "step": 2728
    },
    {
      "epoch": 0.06912015231987173,
      "grad_norm": 1.3123114109039307,
      "learning_rate": 4.977717530570768e-05,
      "loss": 1.9826,
      "step": 2759
    },
    {
      "epoch": 0.0698967832448141,
      "grad_norm": 1.3138995170593262,
      "learning_rate": 4.976584771136425e-05,
      "loss": 1.9938,
      "step": 2790
    },
    {
      "epoch": 0.07067341416975649,
      "grad_norm": 1.2964009046554565,
      "learning_rate": 4.975424063779547e-05,
      "loss": 1.9924,
      "step": 2821
    },
    {
      "epoch": 0.07145004509469886,
      "grad_norm": 1.3010739088058472,
      "learning_rate": 4.974235421598557e-05,
      "loss": 1.9592,
      "step": 2852
    },
    {
      "epoch": 0.07222667601964125,
      "grad_norm": 1.2549103498458862,
      "learning_rate": 4.973018858007122e-05,
      "loss": 1.9394,
      "step": 2883
    },
    {
      "epoch": 0.07300330694458362,
      "grad_norm": 1.3198585510253906,
      "learning_rate": 4.9717743867339963e-05,
      "loss": 1.989,
      "step": 2914
    },
    {
      "epoch": 0.07377993786952601,
      "grad_norm": 1.3653398752212524,
      "learning_rate": 4.9705020218228695e-05,
      "loss": 1.9499,
      "step": 2945
    },
    {
      "epoch": 0.07455656879446838,
      "grad_norm": 4.606905937194824,
      "learning_rate": 4.969201777632205e-05,
      "loss": 1.9504,
      "step": 2976
    },
    {
      "epoch": 0.07533319971941076,
      "grad_norm": 1.2823129892349243,
      "learning_rate": 4.9678736688350846e-05,
      "loss": 1.9483,
      "step": 3007
    },
    {
      "epoch": 0.07610983064435314,
      "grad_norm": 1.2803308963775635,
      "learning_rate": 4.966517710419033e-05,
      "loss": 1.9604,
      "step": 3038
    },
    {
      "epoch": 0.07688646156929552,
      "grad_norm": 1.2765967845916748,
      "learning_rate": 4.965133917685858e-05,
      "loss": 1.941,
      "step": 3069
    },
    {
      "epoch": 0.0776630924942379,
      "grad_norm": 2.622286796569824,
      "learning_rate": 4.9637223062514714e-05,
      "loss": 1.9426,
      "step": 3100
    },
    {
      "epoch": 0.07843972341918028,
      "grad_norm": 1.231387972831726,
      "learning_rate": 4.962282892045718e-05,
      "loss": 1.9416,
      "step": 3131
    },
    {
      "epoch": 0.07921635434412266,
      "grad_norm": 1.2499909400939941,
      "learning_rate": 4.9608156913121904e-05,
      "loss": 1.9226,
      "step": 3162
    },
    {
      "epoch": 0.07999298526906504,
      "grad_norm": 1.296276330947876,
      "learning_rate": 4.959320720608049e-05,
      "loss": 1.935,
      "step": 3193
    },
    {
      "epoch": 0.08076961619400741,
      "grad_norm": 1.2642258405685425,
      "learning_rate": 4.9577979968038354e-05,
      "loss": 1.9185,
      "step": 3224
    },
    {
      "epoch": 0.0815462471189498,
      "grad_norm": 1.2396386861801147,
      "learning_rate": 4.956247537083282e-05,
      "loss": 1.9361,
      "step": 3255
    },
    {
      "epoch": 0.08232287804389217,
      "grad_norm": 1.1811271905899048,
      "learning_rate": 4.9546693589431145e-05,
      "loss": 1.9276,
      "step": 3286
    },
    {
      "epoch": 0.08309950896883456,
      "grad_norm": 1.2897577285766602,
      "learning_rate": 4.9530634801928595e-05,
      "loss": 1.9277,
      "step": 3317
    },
    {
      "epoch": 0.08387613989377693,
      "grad_norm": 1.1453903913497925,
      "learning_rate": 4.9514299189546395e-05,
      "loss": 1.9109,
      "step": 3348
    },
    {
      "epoch": 0.08465277081871932,
      "grad_norm": 1.1600992679595947,
      "learning_rate": 4.949768693662973e-05,
      "loss": 1.9248,
      "step": 3379
    },
    {
      "epoch": 0.08542940174366169,
      "grad_norm": 1.234217882156372,
      "learning_rate": 4.948079823064559e-05,
      "loss": 1.9236,
      "step": 3410
    },
    {
      "epoch": 0.08620603266860406,
      "grad_norm": 1.1933766603469849,
      "learning_rate": 4.946363326218074e-05,
      "loss": 1.9284,
      "step": 3441
    },
    {
      "epoch": 0.08698266359354645,
      "grad_norm": 1.2087618112564087,
      "learning_rate": 4.9446192224939525e-05,
      "loss": 1.8971,
      "step": 3472
    },
    {
      "epoch": 0.08775929451848882,
      "grad_norm": 1.227864146232605,
      "learning_rate": 4.942847531574167e-05,
      "loss": 1.9353,
      "step": 3503
    },
    {
      "epoch": 0.08853592544343121,
      "grad_norm": 1.2828229665756226,
      "learning_rate": 4.941048273452008e-05,
      "loss": 1.9085,
      "step": 3534
    },
    {
      "epoch": 0.08931255636837358,
      "grad_norm": 1.2086588144302368,
      "learning_rate": 4.9392214684318605e-05,
      "loss": 1.9044,
      "step": 3565
    },
    {
      "epoch": 0.09008918729331597,
      "grad_norm": 1.1942152976989746,
      "learning_rate": 4.93736713712897e-05,
      "loss": 1.9182,
      "step": 3596
    },
    {
      "epoch": 0.09086581821825834,
      "grad_norm": 1.2399394512176514,
      "learning_rate": 4.9354853004692124e-05,
      "loss": 1.8943,
      "step": 3627
    },
    {
      "epoch": 0.09164244914320072,
      "grad_norm": 1.227376937866211,
      "learning_rate": 4.93357597968886e-05,
      "loss": 1.8881,
      "step": 3658
    },
    {
      "epoch": 0.0924190800681431,
      "grad_norm": 1.1314102411270142,
      "learning_rate": 4.931639196334338e-05,
      "loss": 1.8878,
      "step": 3689
    },
    {
      "epoch": 0.09319571099308548,
      "grad_norm": 1.192134141921997,
      "learning_rate": 4.9296749722619826e-05,
      "loss": 1.8857,
      "step": 3720
    },
    {
      "epoch": 0.09397234191802786,
      "grad_norm": 1.1905503273010254,
      "learning_rate": 4.9276833296377966e-05,
      "loss": 1.8701,
      "step": 3751
    },
    {
      "epoch": 0.09474897284297024,
      "grad_norm": 1.2335184812545776,
      "learning_rate": 4.925664290937196e-05,
      "loss": 1.8741,
      "step": 3782
    },
    {
      "epoch": 0.09552560376791262,
      "grad_norm": 1.1403234004974365,
      "learning_rate": 4.9236178789447576e-05,
      "loss": 1.8658,
      "step": 3813
    },
    {
      "epoch": 0.096302234692855,
      "grad_norm": 1.1734315156936646,
      "learning_rate": 4.921544116753962e-05,
      "loss": 1.8724,
      "step": 3844
    },
    {
      "epoch": 0.09707886561779737,
      "grad_norm": 1.2637231349945068,
      "learning_rate": 4.919443027766935e-05,
      "loss": 1.867,
      "step": 3875
    },
    {
      "epoch": 0.09785549654273976,
      "grad_norm": 1.1608805656433105,
      "learning_rate": 4.91731463569418e-05,
      "loss": 1.8764,
      "step": 3906
    },
    {
      "epoch": 0.09863212746768213,
      "grad_norm": 1.1562029123306274,
      "learning_rate": 4.915158964554312e-05,
      "loss": 1.8612,
      "step": 3937
    },
    {
      "epoch": 0.09940875839262452,
      "grad_norm": 1.1918668746948242,
      "learning_rate": 4.912976038673786e-05,
      "loss": 1.866,
      "step": 3968
    },
    {
      "epoch": 0.10018538931756689,
      "grad_norm": 1.180206537246704,
      "learning_rate": 4.9107658826866254e-05,
      "loss": 1.849,
      "step": 3999
    },
    {
      "epoch": 0.10096202024250928,
      "grad_norm": 1.0747199058532715,
      "learning_rate": 4.908528521534139e-05,
      "loss": 1.8547,
      "step": 4030
    },
    {
      "epoch": 0.10173865116745165,
      "grad_norm": 1.103786826133728,
      "learning_rate": 4.906263980464644e-05,
      "loss": 1.864,
      "step": 4061
    },
    {
      "epoch": 0.10251528209239402,
      "grad_norm": 1.1390912532806396,
      "learning_rate": 4.903972285033178e-05,
      "loss": 1.8669,
      "step": 4092
    },
    {
      "epoch": 0.10329191301733641,
      "grad_norm": 1.1488468647003174,
      "learning_rate": 4.901653461101213e-05,
      "loss": 1.8445,
      "step": 4123
    },
    {
      "epoch": 0.10406854394227878,
      "grad_norm": 1.1589927673339844,
      "learning_rate": 4.8993075348363626e-05,
      "loss": 1.8529,
      "step": 4154
    },
    {
      "epoch": 0.10484517486722117,
      "grad_norm": 1.1793478727340698,
      "learning_rate": 4.896934532712084e-05,
      "loss": 1.8818,
      "step": 4185
    },
    {
      "epoch": 0.10562180579216354,
      "grad_norm": 1.165292739868164,
      "learning_rate": 4.8945344815073846e-05,
      "loss": 1.8425,
      "step": 4216
    },
    {
      "epoch": 0.10639843671710593,
      "grad_norm": 1.105946660041809,
      "learning_rate": 4.892107408306516e-05,
      "loss": 1.8327,
      "step": 4247
    },
    {
      "epoch": 0.1071750676420483,
      "grad_norm": 1.1690527200698853,
      "learning_rate": 4.889653340498669e-05,
      "loss": 1.8428,
      "step": 4278
    },
    {
      "epoch": 0.10795169856699068,
      "grad_norm": 1.1106913089752197,
      "learning_rate": 4.8871723057776664e-05,
      "loss": 1.839,
      "step": 4309
    },
    {
      "epoch": 0.10872832949193306,
      "grad_norm": 1.1590090990066528,
      "learning_rate": 4.8846643321416476e-05,
      "loss": 1.8658,
      "step": 4340
    },
    {
      "epoch": 0.10950496041687544,
      "grad_norm": 1.1315875053405762,
      "learning_rate": 4.882129447892753e-05,
      "loss": 1.8282,
      "step": 4371
    },
    {
      "epoch": 0.11028159134181782,
      "grad_norm": 1.6055009365081787,
      "learning_rate": 4.8795676816368076e-05,
      "loss": 1.8294,
      "step": 4402
    },
    {
      "epoch": 0.1110582222667602,
      "grad_norm": 1.1236201524734497,
      "learning_rate": 4.876979062282995e-05,
      "loss": 1.8325,
      "step": 4433
    },
    {
      "epoch": 0.11183485319170258,
      "grad_norm": 1.0830161571502686,
      "learning_rate": 4.8743636190435325e-05,
      "loss": 1.8451,
      "step": 4464
    },
    {
      "epoch": 0.11261148411664496,
      "grad_norm": 1.1663005352020264,
      "learning_rate": 4.871721381433344e-05,
      "loss": 1.8349,
      "step": 4495
    },
    {
      "epoch": 0.11338811504158733,
      "grad_norm": 1.2522406578063965,
      "learning_rate": 4.869052379269719e-05,
      "loss": 1.8162,
      "step": 4526
    },
    {
      "epoch": 0.11416474596652972,
      "grad_norm": 1.1271142959594727,
      "learning_rate": 4.866356642671985e-05,
      "loss": 1.8348,
      "step": 4557
    },
    {
      "epoch": 0.11494137689147209,
      "grad_norm": 1.053753137588501,
      "learning_rate": 4.8636342020611634e-05,
      "loss": 1.8376,
      "step": 4588
    },
    {
      "epoch": 0.11571800781641448,
      "grad_norm": 1.1469056606292725,
      "learning_rate": 4.860885088159626e-05,
      "loss": 1.8248,
      "step": 4619
    },
    {
      "epoch": 0.11649463874135685,
      "grad_norm": 1.116128921508789,
      "learning_rate": 4.858109331990751e-05,
      "loss": 1.8454,
      "step": 4650
    },
    {
      "epoch": 0.11727126966629922,
      "grad_norm": 1.0742651224136353,
      "learning_rate": 4.855306964878567e-05,
      "loss": 1.8231,
      "step": 4681
    },
    {
      "epoch": 0.11804790059124161,
      "grad_norm": 1.101657748222351,
      "learning_rate": 4.8524780184474084e-05,
      "loss": 1.8421,
      "step": 4712
    },
    {
      "epoch": 0.11882453151618398,
      "grad_norm": 1.0891473293304443,
      "learning_rate": 4.8496225246215496e-05,
      "loss": 1.8053,
      "step": 4743
    },
    {
      "epoch": 0.11960116244112637,
      "grad_norm": 1.1044034957885742,
      "learning_rate": 4.8467405156248505e-05,
      "loss": 1.8184,
      "step": 4774
    },
    {
      "epoch": 0.12037779336606874,
      "grad_norm": 1.0644783973693848,
      "learning_rate": 4.843832023980392e-05,
      "loss": 1.8237,
      "step": 4805
    },
    {
      "epoch": 0.12115442429101113,
      "grad_norm": 1.0427252054214478,
      "learning_rate": 4.840897082510106e-05,
      "loss": 1.8189,
      "step": 4836
    },
    {
      "epoch": 0.1219310552159535,
      "grad_norm": 1.0806516408920288,
      "learning_rate": 4.8379357243344084e-05,
      "loss": 1.7958,
      "step": 4867
    },
    {
      "epoch": 0.12270768614089587,
      "grad_norm": 1.092581868171692,
      "learning_rate": 4.8349479828718236e-05,
      "loss": 1.8229,
      "step": 4898
    },
    {
      "epoch": 0.12348431706583826,
      "grad_norm": 1.0858657360076904,
      "learning_rate": 4.8319338918386075e-05,
      "loss": 1.8165,
      "step": 4929
    },
    {
      "epoch": 0.12426094799078063,
      "grad_norm": 1.073818325996399,
      "learning_rate": 4.828893485248369e-05,
      "loss": 1.8352,
      "step": 4960
    },
    {
      "epoch": 0.125037578915723,
      "grad_norm": 1.069009780883789,
      "learning_rate": 4.825826797411682e-05,
      "loss": 1.8,
      "step": 4991
    },
    {
      "epoch": 0.1258142098406654,
      "grad_norm": 1.0416266918182373,
      "learning_rate": 4.822733862935702e-05,
      "loss": 1.8224,
      "step": 5022
    },
    {
      "epoch": 0.12659084076560778,
      "grad_norm": 1.093551516532898,
      "learning_rate": 4.819614716723775e-05,
      "loss": 1.8005,
      "step": 5053
    },
    {
      "epoch": 0.12736747169055015,
      "grad_norm": 1.026353359222412,
      "learning_rate": 4.8164693939750425e-05,
      "loss": 1.844,
      "step": 5084
    },
    {
      "epoch": 0.12814410261549253,
      "grad_norm": 1.102858304977417,
      "learning_rate": 4.813297930184042e-05,
      "loss": 1.8045,
      "step": 5115
    },
    {
      "epoch": 0.1289207335404349,
      "grad_norm": 1.0537272691726685,
      "learning_rate": 4.810100361140314e-05,
      "loss": 1.8054,
      "step": 5146
    },
    {
      "epoch": 0.1296973644653773,
      "grad_norm": 1.0389387607574463,
      "learning_rate": 4.8068767229279885e-05,
      "loss": 1.7734,
      "step": 5177
    },
    {
      "epoch": 0.13047399539031967,
      "grad_norm": 1.0274319648742676,
      "learning_rate": 4.8036270519253854e-05,
      "loss": 1.788,
      "step": 5208
    },
    {
      "epoch": 0.13125062631526205,
      "grad_norm": 1.0932128429412842,
      "learning_rate": 4.8003513848046e-05,
      "loss": 1.7813,
      "step": 5239
    },
    {
      "epoch": 0.13202725724020442,
      "grad_norm": 1.0294640064239502,
      "learning_rate": 4.79704975853109e-05,
      "loss": 1.81,
      "step": 5270
    },
    {
      "epoch": 0.13280388816514682,
      "grad_norm": 1.0617210865020752,
      "learning_rate": 4.793722210363262e-05,
      "loss": 1.8017,
      "step": 5301
    },
    {
      "epoch": 0.1335805190900892,
      "grad_norm": 1.0657798051834106,
      "learning_rate": 4.7903687778520414e-05,
      "loss": 1.7884,
      "step": 5332
    },
    {
      "epoch": 0.13435715001503157,
      "grad_norm": 0.9954230189323425,
      "learning_rate": 4.7869894988404593e-05,
      "loss": 1.7848,
      "step": 5363
    },
    {
      "epoch": 0.13513378093997394,
      "grad_norm": 1.117066740989685,
      "learning_rate": 4.783584411463221e-05,
      "loss": 1.7984,
      "step": 5394
    },
    {
      "epoch": 0.13591041186491631,
      "grad_norm": 1.0706870555877686,
      "learning_rate": 4.780153554146274e-05,
      "loss": 1.8008,
      "step": 5425
    },
    {
      "epoch": 0.13668704278985871,
      "grad_norm": 1.063238501548767,
      "learning_rate": 4.7766969656063766e-05,
      "loss": 1.7754,
      "step": 5456
    },
    {
      "epoch": 0.1374636737148011,
      "grad_norm": 1.0702152252197266,
      "learning_rate": 4.773214684850662e-05,
      "loss": 1.8037,
      "step": 5487
    },
    {
      "epoch": 0.13824030463974346,
      "grad_norm": 1.0318788290023804,
      "learning_rate": 4.769706751176193e-05,
      "loss": 1.8174,
      "step": 5518
    },
    {
      "epoch": 0.13901693556468583,
      "grad_norm": 1.0097755193710327,
      "learning_rate": 4.7661732041695264e-05,
      "loss": 1.7648,
      "step": 5549
    },
    {
      "epoch": 0.1397935664896282,
      "grad_norm": 1.063873291015625,
      "learning_rate": 4.762614083706258e-05,
      "loss": 1.809,
      "step": 5580
    },
    {
      "epoch": 0.1405701974145706,
      "grad_norm": 1.0409623384475708,
      "learning_rate": 4.759029429950581e-05,
      "loss": 1.7853,
      "step": 5611
    },
    {
      "epoch": 0.14134682833951298,
      "grad_norm": 1.0131454467773438,
      "learning_rate": 4.7554192833548235e-05,
      "loss": 1.7759,
      "step": 5642
    },
    {
      "epoch": 0.14212345926445535,
      "grad_norm": 1.1056182384490967,
      "learning_rate": 4.751783684659e-05,
      "loss": 1.7959,
      "step": 5673
    },
    {
      "epoch": 0.14290009018939773,
      "grad_norm": 1.0572400093078613,
      "learning_rate": 4.748122674890348e-05,
      "loss": 1.7597,
      "step": 5704
    },
    {
      "epoch": 0.14367672111434013,
      "grad_norm": 1.0375362634658813,
      "learning_rate": 4.7444362953628654e-05,
      "loss": 1.7719,
      "step": 5735
    },
    {
      "epoch": 0.1444533520392825,
      "grad_norm": 1.0238806009292603,
      "learning_rate": 4.7407245876768424e-05,
      "loss": 1.7966,
      "step": 5766
    },
    {
      "epoch": 0.14522998296422487,
      "grad_norm": 1.0335805416107178,
      "learning_rate": 4.736987593718397e-05,
      "loss": 1.7805,
      "step": 5797
    },
    {
      "epoch": 0.14600661388916725,
      "grad_norm": 1.08907151222229,
      "learning_rate": 4.733225355658999e-05,
      "loss": 1.7905,
      "step": 5828
    },
    {
      "epoch": 0.14678324481410962,
      "grad_norm": 1.0151680707931519,
      "learning_rate": 4.7294379159549926e-05,
      "loss": 1.7766,
      "step": 5859
    },
    {
      "epoch": 0.14755987573905202,
      "grad_norm": 0.9963664412498474,
      "learning_rate": 4.725625317347119e-05,
      "loss": 1.7708,
      "step": 5890
    },
    {
      "epoch": 0.1483365066639944,
      "grad_norm": 1.0762684345245361,
      "learning_rate": 4.7217876028600374e-05,
      "loss": 1.7998,
      "step": 5921
    },
    {
      "epoch": 0.14911313758893677,
      "grad_norm": 1.1151126623153687,
      "learning_rate": 4.717924815801832e-05,
      "loss": 1.782,
      "step": 5952
    },
    {
      "epoch": 0.14988976851387914,
      "grad_norm": 0.9970519542694092,
      "learning_rate": 4.714036999763532e-05,
      "loss": 1.7779,
      "step": 5983
    },
    {
      "epoch": 0.1506663994388215,
      "grad_norm": 1.0146524906158447,
      "learning_rate": 4.7101241986186116e-05,
      "loss": 1.7631,
      "step": 6014
    },
    {
      "epoch": 0.15144303036376391,
      "grad_norm": 1.0554558038711548,
      "learning_rate": 4.7061864565225e-05,
      "loss": 1.7729,
      "step": 6045
    },
    {
      "epoch": 0.1522196612887063,
      "grad_norm": 1.0413624048233032,
      "learning_rate": 4.702223817912081e-05,
      "loss": 1.79,
      "step": 6076
    }
  ],
  "logging_steps": 31,
  "max_steps": 30517,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 3052,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 4.263370839061168e+18,
  "train_batch_size": 16,
  "trial_name": null,
  "trial_params": null
}