{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 200,
  "global_step": 812,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0012315270935960591,
      "grad_norm": 53.61643111350721,
      "learning_rate": 1.2195121951219514e-07,
      "loss": 4.2831,
      "step": 1
    },
    {
      "epoch": 0.006157635467980296,
      "grad_norm": 48.00417504001698,
      "learning_rate": 6.097560975609757e-07,
      "loss": 4.1517,
      "step": 5
    },
    {
      "epoch": 0.012315270935960592,
      "grad_norm": 48.656126717388894,
      "learning_rate": 1.2195121951219514e-06,
      "loss": 4.1506,
      "step": 10
    },
    {
      "epoch": 0.01847290640394089,
      "grad_norm": 52.06244750805184,
      "learning_rate": 1.8292682926829268e-06,
      "loss": 4.0921,
      "step": 15
    },
    {
      "epoch": 0.024630541871921183,
      "grad_norm": 41.842979233989844,
      "learning_rate": 2.4390243902439027e-06,
      "loss": 3.832,
      "step": 20
    },
    {
      "epoch": 0.03078817733990148,
      "grad_norm": 26.054139160186445,
      "learning_rate": 3.0487804878048782e-06,
      "loss": 3.4798,
      "step": 25
    },
    {
      "epoch": 0.03694581280788178,
      "grad_norm": 22.768708360256497,
      "learning_rate": 3.6585365853658537e-06,
      "loss": 3.1223,
      "step": 30
    },
    {
      "epoch": 0.04310344827586207,
      "grad_norm": 11.807556286649826,
      "learning_rate": 4.268292682926829e-06,
      "loss": 2.774,
      "step": 35
    },
    {
      "epoch": 0.04926108374384237,
      "grad_norm": 10.768737194323332,
      "learning_rate": 4.8780487804878055e-06,
      "loss": 2.4807,
      "step": 40
    },
    {
      "epoch": 0.05541871921182266,
      "grad_norm": 7.978916355789078,
      "learning_rate": 4.999667943970417e-06,
      "loss": 2.3202,
      "step": 45
    },
    {
      "epoch": 0.06157635467980296,
      "grad_norm": 6.323820375851387,
      "learning_rate": 4.9983191175272635e-06,
      "loss": 2.1276,
      "step": 50
    },
    {
      "epoch": 0.06773399014778325,
      "grad_norm": 5.180335911696107,
      "learning_rate": 4.995933326588439e-06,
      "loss": 1.9964,
      "step": 55
    },
    {
      "epoch": 0.07389162561576355,
      "grad_norm": 4.28298322285663,
      "learning_rate": 4.992511561411944e-06,
      "loss": 1.8959,
      "step": 60
    },
    {
      "epoch": 0.08004926108374384,
      "grad_norm": 3.8950053865689425,
      "learning_rate": 4.988055242252294e-06,
      "loss": 1.7978,
      "step": 65
    },
    {
      "epoch": 0.08620689655172414,
      "grad_norm": 3.5678308957701725,
      "learning_rate": 4.982566218771011e-06,
      "loss": 1.7083,
      "step": 70
    },
    {
      "epoch": 0.09236453201970443,
      "grad_norm": 3.280549835228229,
      "learning_rate": 4.976046769268905e-06,
      "loss": 1.6391,
      "step": 75
    },
    {
      "epoch": 0.09852216748768473,
      "grad_norm": 3.0095284508920943,
      "learning_rate": 4.968499599740427e-06,
      "loss": 1.6052,
      "step": 80
    },
    {
      "epoch": 0.10467980295566502,
      "grad_norm": 2.728976091948925,
      "learning_rate": 4.959927842750501e-06,
      "loss": 1.5852,
      "step": 85
    },
    {
      "epoch": 0.11083743842364532,
      "grad_norm": 2.773190869191402,
      "learning_rate": 4.9503350561343165e-06,
      "loss": 1.5389,
      "step": 90
    },
    {
      "epoch": 0.11699507389162561,
      "grad_norm": 2.495284943078218,
      "learning_rate": 4.939725221520586e-06,
      "loss": 1.529,
      "step": 95
    },
    {
      "epoch": 0.12315270935960591,
      "grad_norm": 2.2585070403096448,
      "learning_rate": 4.928102742678918e-06,
      "loss": 1.5012,
      "step": 100
    },
    {
      "epoch": 0.12931034482758622,
      "grad_norm": 2.3030504202779203,
      "learning_rate": 4.915472443691962e-06,
      "loss": 1.5228,
      "step": 105
    },
    {
      "epoch": 0.1354679802955665,
      "grad_norm": 2.139871025358791,
      "learning_rate": 4.901839566953105e-06,
      "loss": 1.4999,
      "step": 110
    },
    {
      "epoch": 0.1416256157635468,
      "grad_norm": 2.041219388782717,
      "learning_rate": 4.887209770990537e-06,
      "loss": 1.4933,
      "step": 115
    },
    {
      "epoch": 0.1477832512315271,
      "grad_norm": 1.943568456417746,
      "learning_rate": 4.871589128118593e-06,
      "loss": 1.4794,
      "step": 120
    },
    {
      "epoch": 0.1539408866995074,
      "grad_norm": 1.9973156144136615,
      "learning_rate": 4.854984121917349e-06,
      "loss": 1.4658,
      "step": 125
    },
    {
      "epoch": 0.16009852216748768,
      "grad_norm": 2.0438506441497934,
      "learning_rate": 4.83740164454152e-06,
      "loss": 1.4371,
      "step": 130
    },
    {
      "epoch": 0.16625615763546797,
      "grad_norm": 1.856918766416631,
      "learning_rate": 4.8188489938597635e-06,
      "loss": 1.4343,
      "step": 135
    },
    {
      "epoch": 0.1724137931034483,
      "grad_norm": 1.815968456698851,
      "learning_rate": 4.799333870425594e-06,
      "loss": 1.436,
      "step": 140
    },
    {
      "epoch": 0.17857142857142858,
      "grad_norm": 1.833214631845253,
      "learning_rate": 4.778864374281147e-06,
      "loss": 1.4358,
      "step": 145
    },
    {
      "epoch": 0.18472906403940886,
      "grad_norm": 1.7361805528112662,
      "learning_rate": 4.757449001595149e-06,
      "loss": 1.4141,
      "step": 150
    },
    {
      "epoch": 0.19088669950738915,
      "grad_norm": 1.663939988626721,
      "learning_rate": 4.735096641136443e-06,
      "loss": 1.4235,
      "step": 155
    },
    {
      "epoch": 0.19704433497536947,
      "grad_norm": 1.6174000907070356,
      "learning_rate": 4.711816570584583e-06,
      "loss": 1.4094,
      "step": 160
    },
    {
      "epoch": 0.20320197044334976,
      "grad_norm": 1.5393337213532907,
      "learning_rate": 4.687618452678986e-06,
      "loss": 1.423,
      "step": 165
    },
    {
      "epoch": 0.20935960591133004,
      "grad_norm": 1.6022576546137135,
      "learning_rate": 4.662512331208277e-06,
      "loss": 1.4229,
      "step": 170
    },
    {
      "epoch": 0.21551724137931033,
      "grad_norm": 1.4720779248871585,
      "learning_rate": 4.636508626841458e-06,
      "loss": 1.3835,
      "step": 175
    },
    {
      "epoch": 0.22167487684729065,
      "grad_norm": 1.4933242533565454,
      "learning_rate": 4.609618132802661e-06,
      "loss": 1.42,
      "step": 180
    },
    {
      "epoch": 0.22783251231527094,
      "grad_norm": 1.513300883824977,
      "learning_rate": 4.581852010391253e-06,
      "loss": 1.3995,
      "step": 185
    },
    {
      "epoch": 0.23399014778325122,
      "grad_norm": 1.4476120919655675,
      "learning_rate": 4.55322178434918e-06,
      "loss": 1.381,
      "step": 190
    },
    {
      "epoch": 0.24014778325123154,
      "grad_norm": 1.3952114387215153,
      "learning_rate": 4.523739338077443e-06,
      "loss": 1.3886,
      "step": 195
    },
    {
      "epoch": 0.24630541871921183,
      "grad_norm": 1.3697029918579813,
      "learning_rate": 4.493416908703725e-06,
      "loss": 1.3939,
      "step": 200
    },
    {
      "epoch": 0.24630541871921183,
      "eval_loss": 1.3920977115631104,
      "eval_runtime": 145.3824,
      "eval_samples_per_second": 158.96,
      "eval_steps_per_second": 2.49,
      "step": 200
    },
    {
      "epoch": 0.2524630541871921,
      "grad_norm": 1.2995335774783041,
      "learning_rate": 4.462267082003183e-06,
      "loss": 1.3624,
      "step": 205
    },
    {
      "epoch": 0.25862068965517243,
      "grad_norm": 1.3420631400831042,
      "learning_rate": 4.430302787174535e-06,
      "loss": 1.3932,
      "step": 210
    },
    {
      "epoch": 0.2647783251231527,
      "grad_norm": 1.249545343253794,
      "learning_rate": 4.397537291473607e-06,
      "loss": 1.36,
      "step": 215
    },
    {
      "epoch": 0.270935960591133,
      "grad_norm": 1.2639981922437173,
      "learning_rate": 4.36398419470656e-06,
      "loss": 1.3546,
      "step": 220
    },
    {
      "epoch": 0.2770935960591133,
      "grad_norm": 1.3038810090775859,
      "learning_rate": 4.329657423585088e-06,
      "loss": 1.3748,
      "step": 225
    },
    {
      "epoch": 0.2832512315270936,
      "grad_norm": 1.2680995555961931,
      "learning_rate": 4.2945712259459245e-06,
      "loss": 1.3566,
      "step": 230
    },
    {
      "epoch": 0.2894088669950739,
      "grad_norm": 1.1649048616562434,
      "learning_rate": 4.258740164837079e-06,
      "loss": 1.3858,
      "step": 235
    },
    {
      "epoch": 0.2955665024630542,
      "grad_norm": 1.1320815166399936,
      "learning_rate": 4.222179112473211e-06,
      "loss": 1.3447,
      "step": 240
    },
    {
      "epoch": 0.3017241379310345,
      "grad_norm": 1.1611812170906886,
      "learning_rate": 4.184903244062709e-06,
      "loss": 1.3779,
      "step": 245
    },
    {
      "epoch": 0.3078817733990148,
      "grad_norm": 1.1257135240111318,
      "learning_rate": 4.146928031508988e-06,
      "loss": 1.3279,
      "step": 250
    },
    {
      "epoch": 0.31403940886699505,
      "grad_norm": 1.1200758606679568,
      "learning_rate": 4.108269236988648e-06,
      "loss": 1.3651,
      "step": 255
    },
    {
      "epoch": 0.32019704433497537,
      "grad_norm": 0.9671290050907145,
      "learning_rate": 4.068942906409148e-06,
      "loss": 1.3496,
      "step": 260
    },
    {
      "epoch": 0.3263546798029557,
      "grad_norm": 1.0185962333371894,
      "learning_rate": 4.028965362748714e-06,
      "loss": 1.3436,
      "step": 265
    },
    {
      "epoch": 0.33251231527093594,
      "grad_norm": 0.978818844718456,
      "learning_rate": 3.988353199281242e-06,
      "loss": 1.3178,
      "step": 270
    },
    {
      "epoch": 0.33866995073891626,
      "grad_norm": 0.973392817512532,
      "learning_rate": 3.9471232726890185e-06,
      "loss": 1.3292,
      "step": 275
    },
    {
      "epoch": 0.3448275862068966,
      "grad_norm": 1.0993106332736733,
      "learning_rate": 3.905292696066095e-06,
      "loss": 1.3295,
      "step": 280
    },
    {
      "epoch": 0.35098522167487683,
      "grad_norm": 1.0275184655442997,
      "learning_rate": 3.862878831815253e-06,
      "loss": 1.3583,
      "step": 285
    },
    {
      "epoch": 0.35714285714285715,
      "grad_norm": 0.9854194583937018,
      "learning_rate": 3.8198992844414736e-06,
      "loss": 1.2935,
      "step": 290
    },
    {
      "epoch": 0.3633004926108374,
      "grad_norm": 0.9611577984967152,
      "learning_rate": 3.7763718932449322e-06,
      "loss": 1.3332,
      "step": 295
    },
    {
      "epoch": 0.3694581280788177,
      "grad_norm": 1.0605919781420619,
      "learning_rate": 3.7323147249165255e-06,
      "loss": 1.3076,
      "step": 300
    },
    {
      "epoch": 0.37561576354679804,
      "grad_norm": 0.9263868172490802,
      "learning_rate": 3.6877460660390257e-06,
      "loss": 1.309,
      "step": 305
    },
    {
      "epoch": 0.3817733990147783,
      "grad_norm": 0.9572326588734382,
      "learning_rate": 3.6426844154969654e-06,
      "loss": 1.3042,
      "step": 310
    },
    {
      "epoch": 0.3879310344827586,
      "grad_norm": 0.9676601659166285,
      "learning_rate": 3.597148476798393e-06,
      "loss": 1.3091,
      "step": 315
    },
    {
      "epoch": 0.39408866995073893,
      "grad_norm": 0.9053200553500108,
      "learning_rate": 3.5511571503117125e-06,
      "loss": 1.3234,
      "step": 320
    },
    {
      "epoch": 0.4002463054187192,
      "grad_norm": 0.8972007834782274,
      "learning_rate": 3.504729525420798e-06,
      "loss": 1.2932,
      "step": 325
    },
    {
      "epoch": 0.4064039408866995,
      "grad_norm": 0.8726544346292117,
      "learning_rate": 3.4578848726016646e-06,
      "loss": 1.3175,
      "step": 330
    },
    {
      "epoch": 0.4125615763546798,
      "grad_norm": 0.9041763575954129,
      "learning_rate": 3.410642635423967e-06,
      "loss": 1.3192,
      "step": 335
    },
    {
      "epoch": 0.4187192118226601,
      "grad_norm": 0.8556502970513064,
      "learning_rate": 3.3630224224806616e-06,
      "loss": 1.3183,
      "step": 340
    },
    {
      "epoch": 0.4248768472906404,
      "grad_norm": 0.8626177859113207,
      "learning_rate": 3.3150439992491667e-06,
      "loss": 1.3105,
      "step": 345
    },
    {
      "epoch": 0.43103448275862066,
      "grad_norm": 0.7967188042404322,
      "learning_rate": 3.2667272798874038e-06,
      "loss": 1.2897,
      "step": 350
    },
    {
      "epoch": 0.437192118226601,
      "grad_norm": 0.8856403226277132,
      "learning_rate": 3.2180923189681367e-06,
      "loss": 1.2776,
      "step": 355
    },
    {
      "epoch": 0.4433497536945813,
      "grad_norm": 0.9102748822081079,
      "learning_rate": 3.169159303155017e-06,
      "loss": 1.3281,
      "step": 360
    },
    {
      "epoch": 0.44950738916256155,
      "grad_norm": 0.8702706004067514,
      "learning_rate": 3.119948542823812e-06,
      "loss": 1.2999,
      "step": 365
    },
    {
      "epoch": 0.45566502463054187,
      "grad_norm": 0.8391791004394836,
      "learning_rate": 3.0704804636322818e-06,
      "loss": 1.3013,
      "step": 370
    },
    {
      "epoch": 0.4618226600985222,
      "grad_norm": 0.9450784930448292,
      "learning_rate": 3.0207755980422036e-06,
      "loss": 1.3009,
      "step": 375
    },
    {
      "epoch": 0.46798029556650245,
      "grad_norm": 0.8364655680922604,
      "learning_rate": 2.970854576797073e-06,
      "loss": 1.3081,
      "step": 380
    },
    {
      "epoch": 0.47413793103448276,
      "grad_norm": 0.8637495993307496,
      "learning_rate": 2.920738120359e-06,
      "loss": 1.2983,
      "step": 385
    },
    {
      "epoch": 0.4802955665024631,
      "grad_norm": 0.8350897912388913,
      "learning_rate": 2.8704470303083808e-06,
      "loss": 1.3022,
      "step": 390
    },
    {
      "epoch": 0.48645320197044334,
      "grad_norm": 0.8003999962290238,
      "learning_rate": 2.820002180709888e-06,
      "loss": 1.2976,
      "step": 395
    },
    {
      "epoch": 0.49261083743842365,
      "grad_norm": 0.8205844355868652,
      "learning_rate": 2.769424509448379e-06,
      "loss": 1.2806,
      "step": 400
    },
    {
      "epoch": 0.49261083743842365,
      "eval_loss": 1.3155713081359863,
      "eval_runtime": 145.3401,
      "eval_samples_per_second": 159.006,
      "eval_steps_per_second": 2.491,
      "step": 400
    },
    {
      "epoch": 0.4987684729064039,
      "grad_norm": 0.7401769827680776,
      "learning_rate": 2.7187350095383197e-06,
      "loss": 1.2898,
      "step": 405
    },
    {
      "epoch": 0.5049261083743842,
      "grad_norm": 0.7932407776274949,
      "learning_rate": 2.6679547204103174e-06,
      "loss": 1.2932,
      "step": 410
    },
    {
      "epoch": 0.5110837438423645,
      "grad_norm": 0.746144620956253,
      "learning_rate": 2.617104719178395e-06,
      "loss": 1.2883,
      "step": 415
    },
    {
      "epoch": 0.5172413793103449,
      "grad_norm": 0.8276505969533728,
      "learning_rate": 2.566206111891621e-06,
      "loss": 1.3198,
      "step": 420
    },
    {
      "epoch": 0.5233990147783252,
      "grad_norm": 0.7534012683391147,
      "learning_rate": 2.5152800247737293e-06,
      "loss": 1.3157,
      "step": 425
    },
    {
      "epoch": 0.5295566502463054,
      "grad_norm": 0.7409765493416938,
      "learning_rate": 2.4643475954543657e-06,
      "loss": 1.2899,
      "step": 430
    },
    {
      "epoch": 0.5357142857142857,
      "grad_norm": 0.7291610138959701,
      "learning_rate": 2.413429964195603e-06,
      "loss": 1.3,
      "step": 435
    },
    {
      "epoch": 0.541871921182266,
      "grad_norm": 0.8169273699015402,
      "learning_rate": 2.362548265117355e-06,
      "loss": 1.309,
      "step": 440
    },
    {
      "epoch": 0.5480295566502463,
      "grad_norm": 0.7758419122658194,
      "learning_rate": 2.3117236174253556e-06,
      "loss": 1.2992,
      "step": 445
    },
    {
      "epoch": 0.5541871921182266,
      "grad_norm": 0.8040315305923249,
      "learning_rate": 2.260977116645306e-06,
      "loss": 1.2885,
      "step": 450
    },
    {
      "epoch": 0.5603448275862069,
      "grad_norm": 0.7548655355086679,
      "learning_rate": 2.2103298258668755e-06,
      "loss": 1.3115,
      "step": 455
    },
    {
      "epoch": 0.5665024630541872,
      "grad_norm": 0.7357816983542891,
      "learning_rate": 2.1598027670011427e-06,
      "loss": 1.3013,
      "step": 460
    },
    {
      "epoch": 0.5726600985221675,
      "grad_norm": 0.7152977794562079,
      "learning_rate": 2.109416912055145e-06,
      "loss": 1.2661,
      "step": 465
    },
    {
      "epoch": 0.5788177339901478,
      "grad_norm": 0.7449902853108209,
      "learning_rate": 2.0591931744271343e-06,
      "loss": 1.29,
      "step": 470
    },
    {
      "epoch": 0.5849753694581281,
      "grad_norm": 0.6971165726597337,
      "learning_rate": 2.009152400226159e-06,
      "loss": 1.3252,
      "step": 475
    },
    {
      "epoch": 0.5911330049261084,
      "grad_norm": 0.707778592285853,
      "learning_rate": 1.959315359619575e-06,
      "loss": 1.3133,
      "step": 480
    },
    {
      "epoch": 0.5972906403940886,
      "grad_norm": 0.7260676752365064,
      "learning_rate": 1.909702738212082e-06,
      "loss": 1.2915,
      "step": 485
    },
    {
      "epoch": 0.603448275862069,
      "grad_norm": 0.7872640671088219,
      "learning_rate": 1.860335128459853e-06,
      "loss": 1.2664,
      "step": 490
    },
    {
      "epoch": 0.6096059113300493,
      "grad_norm": 0.7299570473994874,
      "learning_rate": 1.8112330211233345e-06,
      "loss": 1.3039,
      "step": 495
    },
    {
      "epoch": 0.6157635467980296,
      "grad_norm": 0.6628505663907466,
      "learning_rate": 1.7624167967622435e-06,
      "loss": 1.2999,
      "step": 500
    },
    {
      "epoch": 0.6219211822660099,
      "grad_norm": 0.7138940590397398,
      "learning_rate": 1.7139067172763182e-06,
      "loss": 1.2828,
      "step": 505
    },
    {
      "epoch": 0.6280788177339901,
      "grad_norm": 0.7291370506888436,
      "learning_rate": 1.6657229174953165e-06,
      "loss": 1.2994,
      "step": 510
    },
    {
      "epoch": 0.6342364532019704,
      "grad_norm": 0.7164829111988928,
      "learning_rate": 1.6178853968217507e-06,
      "loss": 1.2856,
      "step": 515
    },
    {
      "epoch": 0.6403940886699507,
      "grad_norm": 0.7191968032340825,
      "learning_rate": 1.5704140109298446e-06,
      "loss": 1.2698,
      "step": 520
    },
    {
      "epoch": 0.646551724137931,
      "grad_norm": 0.6251465309812664,
      "learning_rate": 1.5233284635241335e-06,
      "loss": 1.2933,
      "step": 525
    },
    {
      "epoch": 0.6527093596059114,
      "grad_norm": 0.6568883358981559,
      "learning_rate": 1.4766482981611538e-06,
      "loss": 1.329,
      "step": 530
    },
    {
      "epoch": 0.6588669950738916,
      "grad_norm": 0.6556665876586427,
      "learning_rate": 1.4303928901375975e-06,
      "loss": 1.3021,
      "step": 535
    },
    {
      "epoch": 0.6650246305418719,
      "grad_norm": 0.7081746689649916,
      "learning_rate": 1.3845814384483069e-06,
      "loss": 1.2891,
      "step": 540
    },
    {
      "epoch": 0.6711822660098522,
      "grad_norm": 0.701781476378995,
      "learning_rate": 1.3392329578174495e-06,
      "loss": 1.2794,
      "step": 545
    },
    {
      "epoch": 0.6773399014778325,
      "grad_norm": 0.6824594950934834,
      "learning_rate": 1.2943662708061677e-06,
      "loss": 1.2819,
      "step": 550
    },
    {
      "epoch": 0.6834975369458128,
      "grad_norm": 0.6822805531132496,
      "learning_rate": 1.2500000000000007e-06,
      "loss": 1.2888,
      "step": 555
    },
    {
      "epoch": 0.6896551724137931,
      "grad_norm": 0.7121381471617909,
      "learning_rate": 1.2061525602792994e-06,
      "loss": 1.298,
      "step": 560
    },
    {
      "epoch": 0.6958128078817734,
      "grad_norm": 0.7156140232396447,
      "learning_rate": 1.1628421511758623e-06,
      "loss": 1.2879,
      "step": 565
    },
    {
      "epoch": 0.7019704433497537,
      "grad_norm": 0.6633266964663513,
      "learning_rate": 1.1200867493189418e-06,
      "loss": 1.2756,
      "step": 570
    },
    {
      "epoch": 0.708128078817734,
      "grad_norm": 0.7027669823672923,
      "learning_rate": 1.0779041009737814e-06,
      "loss": 1.2593,
      "step": 575
    },
    {
      "epoch": 0.7142857142857143,
      "grad_norm": 0.6782260773686245,
      "learning_rate": 1.036311714675766e-06,
      "loss": 1.2915,
      "step": 580
    },
    {
      "epoch": 0.7204433497536946,
      "grad_norm": 0.6853410422742611,
      "learning_rate": 9.953268539632373e-07,
      "loss": 1.2555,
      "step": 585
    },
    {
      "epoch": 0.7266009852216748,
      "grad_norm": 0.6562363208181954,
      "learning_rate": 9.549665302120146e-07,
      "loss": 1.3124,
      "step": 590
    },
    {
      "epoch": 0.7327586206896551,
      "grad_norm": 0.6576311894970724,
      "learning_rate": 9.152474955745622e-07,
      "loss": 1.2861,
      "step": 595
    },
    {
      "epoch": 0.7389162561576355,
      "grad_norm": 0.6870118556621342,
      "learning_rate": 8.761862360267637e-07,
      "loss": 1.2827,
      "step": 600
    },
    {
      "epoch": 0.7389162561576355,
      "eval_loss": 1.2938079833984375,
      "eval_runtime": 145.3232,
      "eval_samples_per_second": 159.025,
      "eval_steps_per_second": 2.491,
      "step": 600
    },
    {
      "epoch": 0.7450738916256158,
      "grad_norm": 0.6499954525297986,
      "learning_rate": 8.377989645251719e-07,
      "loss": 1.2891,
      "step": 605
    },
    {
      "epoch": 0.7512315270935961,
      "grad_norm": 0.670520502123996,
      "learning_rate": 8.001016142775788e-07,
      "loss": 1.2926,
      "step": 610
    },
    {
      "epoch": 0.7573891625615764,
      "grad_norm": 0.6283287368331439,
      "learning_rate": 7.631098321297043e-07,
      "loss": 1.2568,
      "step": 615
    },
    {
      "epoch": 0.7635467980295566,
      "grad_norm": 0.6284975248447452,
      "learning_rate": 7.26838972070737e-07,
      "loss": 1.2775,
      "step": 620
    },
    {
      "epoch": 0.7697044334975369,
      "grad_norm": 0.660893437889715,
      "learning_rate": 6.913040888604319e-07,
      "loss": 1.2773,
      "step": 625
    },
    {
      "epoch": 0.7758620689655172,
      "grad_norm": 0.6534843100770891,
      "learning_rate": 6.565199317804119e-07,
      "loss": 1.3018,
      "step": 630
    },
    {
      "epoch": 0.7820197044334976,
      "grad_norm": 0.5870111446293268,
      "learning_rate": 6.22500938512256e-07,
      "loss": 1.2605,
      "step": 635
    },
    {
      "epoch": 0.7881773399014779,
      "grad_norm": 0.6650501710983314,
      "learning_rate": 5.892612291449285e-07,
      "loss": 1.2832,
      "step": 640
    },
    {
      "epoch": 0.7943349753694581,
      "grad_norm": 0.6867995221706822,
      "learning_rate": 5.568146003140224e-07,
      "loss": 1.28,
      "step": 645
    },
    {
      "epoch": 0.8004926108374384,
      "grad_norm": 0.5896681713601163,
      "learning_rate": 5.251745194752622e-07,
      "loss": 1.2587,
      "step": 650
    },
    {
      "epoch": 0.8066502463054187,
      "grad_norm": 0.6717699104805078,
      "learning_rate": 4.943541193146359e-07,
      "loss": 1.2477,
      "step": 655
    },
    {
      "epoch": 0.812807881773399,
      "grad_norm": 0.7052198718541833,
      "learning_rate": 4.6436619229748043e-07,
      "loss": 1.2522,
      "step": 660
    },
    {
      "epoch": 0.8189655172413793,
      "grad_norm": 0.6158705019186339,
      "learning_rate": 4.352231853587763e-07,
      "loss": 1.256,
      "step": 665
    },
    {
      "epoch": 0.8251231527093597,
      "grad_norm": 0.6548358452202809,
      "learning_rate": 4.069371947368619e-07,
      "loss": 1.2963,
      "step": 670
    },
    {
      "epoch": 0.8312807881773399,
      "grad_norm": 0.6535540639408235,
      "learning_rate": 3.795199609527117e-07,
      "loss": 1.2823,
      "step": 675
    },
    {
      "epoch": 0.8374384236453202,
      "grad_norm": 0.6989573890022164,
      "learning_rate": 3.529828639368568e-07,
      "loss": 1.2919,
      "step": 680
    },
    {
      "epoch": 0.8435960591133005,
      "grad_norm": 0.6428403916541138,
      "learning_rate": 3.273369183059782e-07,
      "loss": 1.2985,
      "step": 685
    },
    {
      "epoch": 0.8497536945812808,
      "grad_norm": 0.6934651562859432,
      "learning_rate": 3.025927687911229e-07,
      "loss": 1.3011,
      "step": 690
    },
    {
      "epoch": 0.8559113300492611,
      "grad_norm": 0.6591962271180641,
      "learning_rate": 2.7876068581945195e-07,
      "loss": 1.265,
      "step": 695
    },
    {
      "epoch": 0.8620689655172413,
      "grad_norm": 0.590195214936619,
      "learning_rate": 2.558505612513479e-07,
      "loss": 1.2809,
      "step": 700
    },
    {
      "epoch": 0.8682266009852216,
      "grad_norm": 0.6115453386704671,
      "learning_rate": 2.3387190427464817e-07,
      "loss": 1.2664,
      "step": 705
    },
    {
      "epoch": 0.874384236453202,
      "grad_norm": 0.7043010144385191,
      "learning_rate": 2.1283383745771853e-07,
      "loss": 1.2946,
      "step": 710
    },
    {
      "epoch": 0.8805418719211823,
      "grad_norm": 0.6609378479773281,
      "learning_rate": 1.9274509296299315e-07,
      "loss": 1.2976,
      "step": 715
    },
    {
      "epoch": 0.8866995073891626,
      "grad_norm": 0.6582696629201034,
      "learning_rate": 1.736140089225613e-07,
      "loss": 1.2638,
      "step": 720
    },
    {
      "epoch": 0.8928571428571429,
      "grad_norm": 0.6835787335143644,
      "learning_rate": 1.554485259773017e-07,
      "loss": 1.2738,
      "step": 725
    },
    {
      "epoch": 0.8990147783251231,
      "grad_norm": 0.662487948050227,
      "learning_rate": 1.3825618398100232e-07,
      "loss": 1.28,
      "step": 730
    },
    {
      "epoch": 0.9051724137931034,
      "grad_norm": 0.687432804005716,
      "learning_rate": 1.2204411887083072e-07,
      "loss": 1.2783,
      "step": 735
    },
    {
      "epoch": 0.9113300492610837,
      "grad_norm": 0.7021729312071676,
      "learning_rate": 1.068190597054583e-07,
      "loss": 1.2964,
      "step": 740
    },
    {
      "epoch": 0.9174876847290641,
      "grad_norm": 0.6515658253201944,
      "learning_rate": 9.25873258720636e-08,
      "loss": 1.2762,
      "step": 745
    },
    {
      "epoch": 0.9236453201970444,
      "grad_norm": 0.6550075584930779,
      "learning_rate": 7.935482446337628e-08,
      "loss": 1.2812,
      "step": 750
    },
    {
      "epoch": 0.9298029556650246,
      "grad_norm": 0.6848726797074379,
      "learning_rate": 6.712704782585206e-08,
      "loss": 1.2735,
      "step": 755
    },
    {
      "epoch": 0.9359605911330049,
      "grad_norm": 0.6302612182103497,
      "learning_rate": 5.590907127999173e-08,
      "loss": 1.2659,
      "step": 760
    },
    {
      "epoch": 0.9421182266009852,
      "grad_norm": 0.6297453401962552,
      "learning_rate": 4.570555101375357e-08,
      "loss": 1.2616,
      "step": 765
    },
    {
      "epoch": 0.9482758620689655,
      "grad_norm": 0.6875738096430432,
      "learning_rate": 3.652072214993335e-08,
      "loss": 1.2823,
      "step": 770
    },
    {
      "epoch": 0.9544334975369458,
      "grad_norm": 0.6691771389169094,
      "learning_rate": 2.8358396988315272e-08,
      "loss": 1.2823,
      "step": 775
    },
    {
      "epoch": 0.9605911330049262,
      "grad_norm": 0.6162495535352837,
      "learning_rate": 2.122196342331767e-08,
      "loss": 1.2605,
      "step": 780
    },
    {
      "epoch": 0.9667487684729064,
      "grad_norm": 0.6765789960335756,
      "learning_rate": 1.511438353779898e-08,
      "loss": 1.2758,
      "step": 785
    },
    {
      "epoch": 0.9729064039408867,
      "grad_norm": 0.6926059474276152,
      "learning_rate": 1.0038192373600652e-08,
      "loss": 1.2591,
      "step": 790
    },
    {
      "epoch": 0.979064039408867,
      "grad_norm": 0.6413165173071386,
      "learning_rate": 5.995496879339924e-09,
      "loss": 1.2875,
      "step": 795
    },
    {
      "epoch": 0.9852216748768473,
      "grad_norm": 0.6372683334165604,
      "learning_rate": 2.9879750358896565e-09,
      "loss": 1.2807,
      "step": 800
    },
    {
      "epoch": 0.9852216748768473,
      "eval_loss": 1.2902166843414307,
      "eval_runtime": 145.3653,
      "eval_samples_per_second": 158.979,
      "eval_steps_per_second": 2.49,
      "step": 800
    },
    {
      "epoch": 0.9913793103448276,
      "grad_norm": 0.5963019194626616,
      "learning_rate": 1.0168751599085035e-09,
      "loss": 1.2782,
      "step": 805
    },
    {
      "epoch": 0.9975369458128078,
      "grad_norm": 0.6176142023053375,
      "learning_rate": 8.301538570676393e-11,
      "loss": 1.2582,
      "step": 810
    },
    {
      "epoch": 1.0,
      "step": 812,
      "total_flos": 277556883980288.0,
      "train_loss": 1.458759990231744,
      "train_runtime": 5428.2383,
      "train_samples_per_second": 38.293,
      "train_steps_per_second": 0.15
    }
  ],
  "logging_steps": 5,
  "max_steps": 812,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 1000000,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 277556883980288.0,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}