{
"best_global_step": null,
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 15.0,
"eval_steps": 500,
"global_step": 930,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.08064516129032258,
"grad_norm": 7.9788904562924206,
"learning_rate": 2.1505376344086024e-07,
"loss": 0.7746,
"step": 5
},
{
"epoch": 0.16129032258064516,
"grad_norm": 9.521156430604261,
"learning_rate": 4.838709677419355e-07,
"loss": 0.7594,
"step": 10
},
{
"epoch": 0.24193548387096775,
"grad_norm": 6.990008524163482,
"learning_rate": 7.526881720430108e-07,
"loss": 0.7177,
"step": 15
},
{
"epoch": 0.3225806451612903,
"grad_norm": 5.245105441222518,
"learning_rate": 1.021505376344086e-06,
"loss": 0.622,
"step": 20
},
{
"epoch": 0.4032258064516129,
"grad_norm": 6.050490163797067,
"learning_rate": 1.2903225806451614e-06,
"loss": 0.5555,
"step": 25
},
{
"epoch": 0.4838709677419355,
"grad_norm": 4.382694958544679,
"learning_rate": 1.5591397849462367e-06,
"loss": 0.5051,
"step": 30
},
{
"epoch": 0.5645161290322581,
"grad_norm": 3.5106557630691793,
"learning_rate": 1.827956989247312e-06,
"loss": 0.4399,
"step": 35
},
{
"epoch": 0.6451612903225806,
"grad_norm": 3.6811492921662814,
"learning_rate": 2.096774193548387e-06,
"loss": 0.4063,
"step": 40
},
{
"epoch": 0.7258064516129032,
"grad_norm": 3.441163884743535,
"learning_rate": 2.3655913978494625e-06,
"loss": 0.3723,
"step": 45
},
{
"epoch": 0.8064516129032258,
"grad_norm": 3.447581913907039,
"learning_rate": 2.634408602150538e-06,
"loss": 0.4851,
"step": 50
},
{
"epoch": 0.8870967741935484,
"grad_norm": 3.675136115986037,
"learning_rate": 2.903225806451613e-06,
"loss": 0.4024,
"step": 55
},
{
"epoch": 0.967741935483871,
"grad_norm": 3.0780394937329274,
"learning_rate": 3.1720430107526885e-06,
"loss": 0.413,
"step": 60
},
{
"epoch": 1.0483870967741935,
"grad_norm": 3.2116900427519384,
"learning_rate": 3.440860215053764e-06,
"loss": 0.3071,
"step": 65
},
{
"epoch": 1.129032258064516,
"grad_norm": 4.009148207400134,
"learning_rate": 3.7096774193548392e-06,
"loss": 0.3085,
"step": 70
},
{
"epoch": 1.2096774193548387,
"grad_norm": 2.8388506596193777,
"learning_rate": 3.978494623655914e-06,
"loss": 0.3009,
"step": 75
},
{
"epoch": 1.2903225806451613,
"grad_norm": 3.328140626191503,
"learning_rate": 4.2473118279569895e-06,
"loss": 0.3181,
"step": 80
},
{
"epoch": 1.370967741935484,
"grad_norm": 4.027543468539502,
"learning_rate": 4.516129032258065e-06,
"loss": 0.2963,
"step": 85
},
{
"epoch": 1.4516129032258065,
"grad_norm": 3.2636669590920286,
"learning_rate": 4.78494623655914e-06,
"loss": 0.2792,
"step": 90
},
{
"epoch": 1.532258064516129,
"grad_norm": 3.5992606337843487,
"learning_rate": 5.0537634408602155e-06,
"loss": 0.3347,
"step": 95
},
{
"epoch": 1.6129032258064515,
"grad_norm": 3.5694108633818504,
"learning_rate": 5.322580645161291e-06,
"loss": 0.3348,
"step": 100
},
{
"epoch": 1.6935483870967742,
"grad_norm": 3.660221218865697,
"learning_rate": 5.591397849462365e-06,
"loss": 0.3272,
"step": 105
},
{
"epoch": 1.7741935483870968,
"grad_norm": 3.4279728194623056,
"learning_rate": 5.8602150537634415e-06,
"loss": 0.2729,
"step": 110
},
{
"epoch": 1.8548387096774195,
"grad_norm": 3.3534086258710825,
"learning_rate": 6.129032258064517e-06,
"loss": 0.2944,
"step": 115
},
{
"epoch": 1.935483870967742,
"grad_norm": 3.040737011508716,
"learning_rate": 6.397849462365592e-06,
"loss": 0.3298,
"step": 120
},
{
"epoch": 2.0161290322580645,
"grad_norm": 3.0276013694676283,
"learning_rate": 6.666666666666667e-06,
"loss": 0.2396,
"step": 125
},
{
"epoch": 2.096774193548387,
"grad_norm": 4.83974864448008,
"learning_rate": 6.935483870967743e-06,
"loss": 0.1704,
"step": 130
},
{
"epoch": 2.1774193548387095,
"grad_norm": 3.078350989376756,
"learning_rate": 7.204301075268818e-06,
"loss": 0.1689,
"step": 135
},
{
"epoch": 2.258064516129032,
"grad_norm": 3.4510008163142754,
"learning_rate": 7.4731182795698935e-06,
"loss": 0.1483,
"step": 140
},
{
"epoch": 2.338709677419355,
"grad_norm": 3.3742727028380353,
"learning_rate": 7.741935483870968e-06,
"loss": 0.1847,
"step": 145
},
{
"epoch": 2.4193548387096775,
"grad_norm": 2.9190658488467767,
"learning_rate": 8.010752688172043e-06,
"loss": 0.181,
"step": 150
},
{
"epoch": 2.5,
"grad_norm": 3.2768686821304227,
"learning_rate": 8.279569892473119e-06,
"loss": 0.201,
"step": 155
},
{
"epoch": 2.5806451612903225,
"grad_norm": 2.8067701037574753,
"learning_rate": 8.548387096774194e-06,
"loss": 0.1922,
"step": 160
},
{
"epoch": 2.661290322580645,
"grad_norm": 2.951622834663203,
"learning_rate": 8.81720430107527e-06,
"loss": 0.2015,
"step": 165
},
{
"epoch": 2.741935483870968,
"grad_norm": 3.03623148237881,
"learning_rate": 9.086021505376345e-06,
"loss": 0.1822,
"step": 170
},
{
"epoch": 2.8225806451612905,
"grad_norm": 3.0772573560886514,
"learning_rate": 9.35483870967742e-06,
"loss": 0.227,
"step": 175
},
{
"epoch": 2.903225806451613,
"grad_norm": 2.9654885185259827,
"learning_rate": 9.623655913978495e-06,
"loss": 0.2212,
"step": 180
},
{
"epoch": 2.9838709677419355,
"grad_norm": 3.067899631927538,
"learning_rate": 9.89247311827957e-06,
"loss": 0.2277,
"step": 185
},
{
"epoch": 3.064516129032258,
"grad_norm": 2.7807126927253187,
"learning_rate": 9.999920755303033e-06,
"loss": 0.1288,
"step": 190
},
{
"epoch": 3.1451612903225805,
"grad_norm": 3.4305790367630777,
"learning_rate": 9.999436491251425e-06,
"loss": 0.1393,
"step": 195
},
{
"epoch": 3.225806451612903,
"grad_norm": 2.7900737962648043,
"learning_rate": 9.998512030567253e-06,
"loss": 0.1298,
"step": 200
},
{
"epoch": 3.306451612903226,
"grad_norm": 2.349901750902021,
"learning_rate": 9.99714745464859e-06,
"loss": 0.1306,
"step": 205
},
{
"epoch": 3.3870967741935485,
"grad_norm": 2.77370394045052,
"learning_rate": 9.995342883645325e-06,
"loss": 0.1521,
"step": 210
},
{
"epoch": 3.467741935483871,
"grad_norm": 3.0084835646623094,
"learning_rate": 9.993098476448576e-06,
"loss": 0.1314,
"step": 215
},
{
"epoch": 3.5483870967741935,
"grad_norm": 3.093621590181524,
"learning_rate": 9.990414430676716e-06,
"loss": 0.1545,
"step": 220
},
{
"epoch": 3.629032258064516,
"grad_norm": 3.1528581559810305,
"learning_rate": 9.987290982657961e-06,
"loss": 0.1929,
"step": 225
},
{
"epoch": 3.709677419354839,
"grad_norm": 2.8445198693562816,
"learning_rate": 9.983728407409565e-06,
"loss": 0.1305,
"step": 230
},
{
"epoch": 3.790322580645161,
"grad_norm": 2.960192414152783,
"learning_rate": 9.979727018613607e-06,
"loss": 0.1425,
"step": 235
},
{
"epoch": 3.870967741935484,
"grad_norm": 2.6571319169632703,
"learning_rate": 9.975287168589369e-06,
"loss": 0.1491,
"step": 240
},
{
"epoch": 3.9516129032258065,
"grad_norm": 3.3868215591855253,
"learning_rate": 9.970409248262314e-06,
"loss": 0.1688,
"step": 245
},
{
"epoch": 4.032258064516129,
"grad_norm": 2.091135299801856,
"learning_rate": 9.965093687129669e-06,
"loss": 0.1028,
"step": 250
},
{
"epoch": 4.112903225806452,
"grad_norm": 3.802878910271049,
"learning_rate": 9.959340953222602e-06,
"loss": 0.0842,
"step": 255
},
{
"epoch": 4.193548387096774,
"grad_norm": 3.7199837682735684,
"learning_rate": 9.953151553065019e-06,
"loss": 0.088,
"step": 260
},
{
"epoch": 4.274193548387097,
"grad_norm": 2.2960394303716116,
"learning_rate": 9.94652603162896e-06,
"loss": 0.0884,
"step": 265
},
{
"epoch": 4.354838709677419,
"grad_norm": 2.465062363580101,
"learning_rate": 9.939464972286618e-06,
"loss": 0.1006,
"step": 270
},
{
"epoch": 4.435483870967742,
"grad_norm": 2.0327702748628553,
"learning_rate": 9.931968996758972e-06,
"loss": 0.0899,
"step": 275
},
{
"epoch": 4.516129032258064,
"grad_norm": 2.305360963756021,
"learning_rate": 9.924038765061042e-06,
"loss": 0.079,
"step": 280
},
{
"epoch": 4.596774193548387,
"grad_norm": 2.4662415593135725,
"learning_rate": 9.915674975443779e-06,
"loss": 0.0875,
"step": 285
},
{
"epoch": 4.67741935483871,
"grad_norm": 2.555687280861228,
"learning_rate": 9.906878364332586e-06,
"loss": 0.0953,
"step": 290
},
{
"epoch": 4.758064516129032,
"grad_norm": 2.3818809469476476,
"learning_rate": 9.897649706262474e-06,
"loss": 0.0855,
"step": 295
},
{
"epoch": 4.838709677419355,
"grad_norm": 2.30924468453585,
"learning_rate": 9.88798981380986e-06,
"loss": 0.1138,
"step": 300
},
{
"epoch": 4.919354838709677,
"grad_norm": 2.047820547358064,
"learning_rate": 9.877899537521028e-06,
"loss": 0.1256,
"step": 305
},
{
"epoch": 5.0,
"grad_norm": 2.7534037559978417,
"learning_rate": 9.867379765837237e-06,
"loss": 0.1109,
"step": 310
},
{
"epoch": 5.080645161290323,
"grad_norm": 2.1913087837549052,
"learning_rate": 9.85643142501649e-06,
"loss": 0.0544,
"step": 315
},
{
"epoch": 5.161290322580645,
"grad_norm": 2.293004207498763,
"learning_rate": 9.845055479051986e-06,
"loss": 0.0678,
"step": 320
},
{
"epoch": 5.241935483870968,
"grad_norm": 1.9502100689755515,
"learning_rate": 9.833252929587231e-06,
"loss": 0.0596,
"step": 325
},
{
"epoch": 5.32258064516129,
"grad_norm": 2.377741796012424,
"learning_rate": 9.821024815827854e-06,
"loss": 0.0536,
"step": 330
},
{
"epoch": 5.403225806451613,
"grad_norm": 1.8788141547729034,
"learning_rate": 9.808372214450093e-06,
"loss": 0.066,
"step": 335
},
{
"epoch": 5.483870967741936,
"grad_norm": 2.18752083985909,
"learning_rate": 9.795296239506011e-06,
"loss": 0.064,
"step": 340
},
{
"epoch": 5.564516129032258,
"grad_norm": 2.1214320464857432,
"learning_rate": 9.781798042325392e-06,
"loss": 0.0718,
"step": 345
},
{
"epoch": 5.645161290322581,
"grad_norm": 2.3419858522206423,
"learning_rate": 9.767878811414373e-06,
"loss": 0.0751,
"step": 350
},
{
"epoch": 5.725806451612903,
"grad_norm": 2.0692559600108846,
"learning_rate": 9.753539772350792e-06,
"loss": 0.0559,
"step": 355
},
{
"epoch": 5.806451612903226,
"grad_norm": 1.7149217664306626,
"learning_rate": 9.738782187676282e-06,
"loss": 0.0615,
"step": 360
},
{
"epoch": 5.887096774193548,
"grad_norm": 2.420112879389103,
"learning_rate": 9.723607356785103e-06,
"loss": 0.0651,
"step": 365
},
{
"epoch": 5.967741935483871,
"grad_norm": 2.0342725263869488,
"learning_rate": 9.70801661580973e-06,
"loss": 0.0657,
"step": 370
},
{
"epoch": 6.048387096774194,
"grad_norm": 1.5676113830493696,
"learning_rate": 9.692011337503212e-06,
"loss": 0.048,
"step": 375
},
{
"epoch": 6.129032258064516,
"grad_norm": 1.4853313640607673,
"learning_rate": 9.675592931118293e-06,
"loss": 0.0395,
"step": 380
},
{
"epoch": 6.209677419354839,
"grad_norm": 2.076517691069182,
"learning_rate": 9.658762842283343e-06,
"loss": 0.0478,
"step": 385
},
{
"epoch": 6.290322580645161,
"grad_norm": 1.6175069613510986,
"learning_rate": 9.641522552875055e-06,
"loss": 0.0402,
"step": 390
},
{
"epoch": 6.370967741935484,
"grad_norm": 2.536372096562857,
"learning_rate": 9.62387358088798e-06,
"loss": 0.0512,
"step": 395
},
{
"epoch": 6.451612903225806,
"grad_norm": 1.8118088837546005,
"learning_rate": 9.605817480300863e-06,
"loss": 0.0534,
"step": 400
},
{
"epoch": 6.532258064516129,
"grad_norm": 2.286351385709345,
"learning_rate": 9.587355840939813e-06,
"loss": 0.0478,
"step": 405
},
{
"epoch": 6.612903225806452,
"grad_norm": 1.8901897009499684,
"learning_rate": 9.568490288338324e-06,
"loss": 0.0425,
"step": 410
},
{
"epoch": 6.693548387096774,
"grad_norm": 1.5196329706278056,
"learning_rate": 9.549222483594154e-06,
"loss": 0.0447,
"step": 415
},
{
"epoch": 6.774193548387097,
"grad_norm": 1.4880512934239636,
"learning_rate": 9.529554123223053e-06,
"loss": 0.0433,
"step": 420
},
{
"epoch": 6.854838709677419,
"grad_norm": 1.711263234463656,
"learning_rate": 9.5094869390094e-06,
"loss": 0.0452,
"step": 425
},
{
"epoch": 6.935483870967742,
"grad_norm": 1.8242713772702877,
"learning_rate": 9.48902269785371e-06,
"loss": 0.0417,
"step": 430
},
{
"epoch": 7.016129032258065,
"grad_norm": 1.408406163892193,
"learning_rate": 9.468163201617063e-06,
"loss": 0.05,
"step": 435
},
{
"epoch": 7.096774193548387,
"grad_norm": 1.0961989080978434,
"learning_rate": 9.446910286962453e-06,
"loss": 0.0249,
"step": 440
},
{
"epoch": 7.17741935483871,
"grad_norm": 1.7189796665774795,
"learning_rate": 9.425265825193077e-06,
"loss": 0.0317,
"step": 445
},
{
"epoch": 7.258064516129032,
"grad_norm": 1.9348300573137698,
"learning_rate": 9.403231722087554e-06,
"loss": 0.0347,
"step": 450
},
{
"epoch": 7.338709677419355,
"grad_norm": 1.7296791252489212,
"learning_rate": 9.380809917732132e-06,
"loss": 0.031,
"step": 455
},
{
"epoch": 7.419354838709677,
"grad_norm": 1.301188666000411,
"learning_rate": 9.358002386349862e-06,
"loss": 0.0337,
"step": 460
},
{
"epoch": 7.5,
"grad_norm": 1.6849307326292067,
"learning_rate": 9.334811136126778e-06,
"loss": 0.034,
"step": 465
},
{
"epoch": 7.580645161290323,
"grad_norm": 1.8341284468308772,
"learning_rate": 9.31123820903506e-06,
"loss": 0.0425,
"step": 470
},
{
"epoch": 7.661290322580645,
"grad_norm": 1.6185716325286443,
"learning_rate": 9.287285680653254e-06,
"loss": 0.0334,
"step": 475
},
{
"epoch": 7.741935483870968,
"grad_norm": 1.4082036883187536,
"learning_rate": 9.262955659983522e-06,
"loss": 0.0365,
"step": 480
},
{
"epoch": 7.82258064516129,
"grad_norm": 1.9810172286312782,
"learning_rate": 9.238250289265921e-06,
"loss": 0.031,
"step": 485
},
{
"epoch": 7.903225806451613,
"grad_norm": 1.8318457886483148,
"learning_rate": 9.21317174378982e-06,
"loss": 0.0354,
"step": 490
},
{
"epoch": 7.983870967741936,
"grad_norm": 1.674905845501415,
"learning_rate": 9.187722231702326e-06,
"loss": 0.0409,
"step": 495
},
{
"epoch": 8.064516129032258,
"grad_norm": 1.206360757615427,
"learning_rate": 9.161903993813892e-06,
"loss": 0.0276,
"step": 500
},
{
"epoch": 8.14516129032258,
"grad_norm": 0.9995201803814792,
"learning_rate": 9.135719303400995e-06,
"loss": 0.019,
"step": 505
},
{
"epoch": 8.225806451612904,
"grad_norm": 1.77138842254183,
"learning_rate": 9.10917046600598e-06,
"loss": 0.0235,
"step": 510
},
{
"epoch": 8.306451612903226,
"grad_norm": 1.772318685251266,
"learning_rate": 9.082259819234063e-06,
"loss": 0.0262,
"step": 515
},
{
"epoch": 8.387096774193548,
"grad_norm": 1.3694563307357386,
"learning_rate": 9.054989732547507e-06,
"loss": 0.0274,
"step": 520
},
{
"epoch": 8.46774193548387,
"grad_norm": 1.5818168003304018,
"learning_rate": 9.027362607056986e-06,
"loss": 0.0281,
"step": 525
},
{
"epoch": 8.548387096774194,
"grad_norm": 1.477790915380952,
"learning_rate": 8.999380875310176e-06,
"loss": 0.0274,
"step": 530
},
{
"epoch": 8.629032258064516,
"grad_norm": 1.463450411153143,
"learning_rate": 8.971047001077561e-06,
"loss": 0.0305,
"step": 535
},
{
"epoch": 8.709677419354838,
"grad_norm": 1.4651802172060195,
"learning_rate": 8.942363479135516e-06,
"loss": 0.0293,
"step": 540
},
{
"epoch": 8.790322580645162,
"grad_norm": 1.4324189291721474,
"learning_rate": 8.913332835046629e-06,
"loss": 0.0239,
"step": 545
},
{
"epoch": 8.870967741935484,
"grad_norm": 1.5257751575787353,
"learning_rate": 8.883957624937333e-06,
"loss": 0.0261,
"step": 550
},
{
"epoch": 8.951612903225806,
"grad_norm": 1.6993511552941885,
"learning_rate": 8.854240435272842e-06,
"loss": 0.0484,
"step": 555
},
{
"epoch": 9.03225806451613,
"grad_norm": 1.159130365005857,
"learning_rate": 8.824183882629411e-06,
"loss": 0.027,
"step": 560
},
{
"epoch": 9.112903225806452,
"grad_norm": 1.2166909931601617,
"learning_rate": 8.793790613463956e-06,
"loss": 0.0183,
"step": 565
},
{
"epoch": 9.193548387096774,
"grad_norm": 1.160324774431725,
"learning_rate": 8.763063303881021e-06,
"loss": 0.0188,
"step": 570
},
{
"epoch": 9.274193548387096,
"grad_norm": 1.3440474061044185,
"learning_rate": 8.73200465939717e-06,
"loss": 0.0224,
"step": 575
},
{
"epoch": 9.35483870967742,
"grad_norm": 1.1812237336382116,
"learning_rate": 8.700617414702746e-06,
"loss": 0.0222,
"step": 580
},
{
"epoch": 9.435483870967742,
"grad_norm": 1.2040220371499268,
"learning_rate": 8.668904333421098e-06,
"loss": 0.0213,
"step": 585
},
{
"epoch": 9.516129032258064,
"grad_norm": 1.1242316338416976,
"learning_rate": 8.636868207865244e-06,
"loss": 0.026,
"step": 590
},
{
"epoch": 9.596774193548388,
"grad_norm": 1.2056824125041377,
"learning_rate": 8.604511858792006e-06,
"loss": 0.0212,
"step": 595
},
{
"epoch": 9.67741935483871,
"grad_norm": 1.2737111889090427,
"learning_rate": 8.571838135153645e-06,
"loss": 0.0221,
"step": 600
},
{
"epoch": 9.758064516129032,
"grad_norm": 1.2658665183240156,
"learning_rate": 8.538849913847019e-06,
"loss": 0.0199,
"step": 605
},
{
"epoch": 9.838709677419354,
"grad_norm": 1.444864038120413,
"learning_rate": 8.505550099460264e-06,
"loss": 0.0242,
"step": 610
},
{
"epoch": 9.919354838709678,
"grad_norm": 1.464062720341168,
"learning_rate": 8.471941624017058e-06,
"loss": 0.0241,
"step": 615
},
{
"epoch": 10.0,
"grad_norm": 1.159759434728226,
"learning_rate": 8.43802744671845e-06,
"loss": 0.0262,
"step": 620
},
{
"epoch": 10.080645161290322,
"grad_norm": 1.2344076422984258,
"learning_rate": 8.403810553682307e-06,
"loss": 0.018,
"step": 625
},
{
"epoch": 10.161290322580646,
"grad_norm": 1.1047319831516356,
"learning_rate": 8.369293957680397e-06,
"loss": 0.0156,
"step": 630
},
{
"epoch": 10.241935483870968,
"grad_norm": 0.9725174239581482,
"learning_rate": 8.334480697873101e-06,
"loss": 0.014,
"step": 635
},
{
"epoch": 10.32258064516129,
"grad_norm": 1.0873207313793845,
"learning_rate": 8.299373839541829e-06,
"loss": 0.0184,
"step": 640
},
{
"epoch": 10.403225806451612,
"grad_norm": 1.1713325775584265,
"learning_rate": 8.26397647381912e-06,
"loss": 0.0191,
"step": 645
},
{
"epoch": 10.483870967741936,
"grad_norm": 1.1289992391197494,
"learning_rate": 8.228291717416472e-06,
"loss": 0.0192,
"step": 650
},
{
"epoch": 10.564516129032258,
"grad_norm": 1.4540729880318348,
"learning_rate": 8.192322712349917e-06,
"loss": 0.0209,
"step": 655
},
{
"epoch": 10.64516129032258,
"grad_norm": 1.2131150635320254,
"learning_rate": 8.15607262566337e-06,
"loss": 0.0201,
"step": 660
},
{
"epoch": 10.725806451612904,
"grad_norm": 1.583339620420521,
"learning_rate": 8.119544649149762e-06,
"loss": 0.0207,
"step": 665
},
{
"epoch": 10.806451612903226,
"grad_norm": 1.298849150527335,
"learning_rate": 8.08274199907003e-06,
"loss": 0.017,
"step": 670
},
{
"epoch": 10.887096774193548,
"grad_norm": 1.1682269254944644,
"learning_rate": 8.0456679158699e-06,
"loss": 0.0175,
"step": 675
},
{
"epoch": 10.967741935483872,
"grad_norm": 1.3418845328882663,
"learning_rate": 8.008325663894586e-06,
"loss": 0.0199,
"step": 680
},
{
"epoch": 11.048387096774194,
"grad_norm": 1.0350578214030737,
"learning_rate": 7.970718531101365e-06,
"loss": 0.0173,
"step": 685
},
{
"epoch": 11.129032258064516,
"grad_norm": 0.7751062982939178,
"learning_rate": 7.932849828770062e-06,
"loss": 0.0154,
"step": 690
},
{
"epoch": 11.209677419354838,
"grad_norm": 1.1249340896245428,
"learning_rate": 7.89472289121151e-06,
"loss": 0.0153,
"step": 695
},
{
"epoch": 11.290322580645162,
"grad_norm": 1.069133814090821,
"learning_rate": 7.856341075473963e-06,
"loss": 0.0128,
"step": 700
},
{
"epoch": 11.370967741935484,
"grad_norm": 1.3386909438492063,
"learning_rate": 7.817707761047498e-06,
"loss": 0.0147,
"step": 705
},
{
"epoch": 11.451612903225806,
"grad_norm": 1.2544557604239914,
"learning_rate": 7.77882634956647e-06,
"loss": 0.0137,
"step": 710
},
{
"epoch": 11.532258064516128,
"grad_norm": 1.50608779604205,
"learning_rate": 7.739700264509993e-06,
"loss": 0.0158,
"step": 715
},
{
"epoch": 11.612903225806452,
"grad_norm": 0.8383520545182386,
"learning_rate": 7.700332950900504e-06,
"loss": 0.0115,
"step": 720
},
{
"epoch": 11.693548387096774,
"grad_norm": 1.1528102169152703,
"learning_rate": 7.660727875000432e-06,
"loss": 0.0151,
"step": 725
},
{
"epoch": 11.774193548387096,
"grad_norm": 1.1731217245462982,
"learning_rate": 7.6208885240069995e-06,
"loss": 0.0137,
"step": 730
},
{
"epoch": 11.85483870967742,
"grad_norm": 1.0109598241071842,
"learning_rate": 7.5808184057451765e-06,
"loss": 0.0133,
"step": 735
},
{
"epoch": 11.935483870967742,
"grad_norm": 1.1815937319956697,
"learning_rate": 7.540521048358814e-06,
"loss": 0.0168,
"step": 740
},
{
"epoch": 12.016129032258064,
"grad_norm": 0.6727751980657929,
"learning_rate": 7.500000000000001e-06,
"loss": 0.0145,
"step": 745
},
{
"epoch": 12.096774193548388,
"grad_norm": 0.8287342886061079,
"learning_rate": 7.459258828516645e-06,
"loss": 0.0108,
"step": 750
},
{
"epoch": 12.17741935483871,
"grad_norm": 0.8907982669901318,
"learning_rate": 7.418301121138335e-06,
"loss": 0.0082,
"step": 755
},
{
"epoch": 12.258064516129032,
"grad_norm": 1.08087496747789,
"learning_rate": 7.3771304841604764e-06,
"loss": 0.0121,
"step": 760
},
{
"epoch": 12.338709677419354,
"grad_norm": 0.9242022062218133,
"learning_rate": 7.335750542626772e-06,
"loss": 0.0118,
"step": 765
},
{
"epoch": 12.419354838709678,
"grad_norm": 0.6674173817031188,
"learning_rate": 7.294164940010031e-06,
"loss": 0.0088,
"step": 770
},
{
"epoch": 12.5,
"grad_norm": 0.7427776074770551,
"learning_rate": 7.2523773378913655e-06,
"loss": 0.01,
"step": 775
},
{
"epoch": 12.580645161290322,
"grad_norm": 0.8105255073657276,
"learning_rate": 7.210391415637797e-06,
"loss": 0.0102,
"step": 780
},
{
"epoch": 12.661290322580646,
"grad_norm": 0.9355789095839642,
"learning_rate": 7.168210870078277e-06,
"loss": 0.0112,
"step": 785
},
{
"epoch": 12.741935483870968,
"grad_norm": 0.8096036161441085,
"learning_rate": 7.125839415178204e-06,
"loss": 0.0109,
"step": 790
},
{
"epoch": 12.82258064516129,
"grad_norm": 0.6993400061698866,
"learning_rate": 7.083280781712394e-06,
"loss": 0.0125,
"step": 795
},
{
"epoch": 12.903225806451612,
"grad_norm": 0.9083278507643704,
"learning_rate": 7.0405387169365965e-06,
"loss": 0.0128,
"step": 800
},
{
"epoch": 12.983870967741936,
"grad_norm": 0.8581097245501025,
"learning_rate": 6.9976169842575526e-06,
"loss": 0.0095,
"step": 805
},
{
"epoch": 13.064516129032258,
"grad_norm": 1.1318882973999245,
"learning_rate": 6.9545193629016215e-06,
"loss": 0.0053,
"step": 810
},
{
"epoch": 13.14516129032258,
"grad_norm": 0.6323423097700148,
"learning_rate": 6.911249647582036e-06,
"loss": 0.0071,
"step": 815
},
{
"epoch": 13.225806451612904,
"grad_norm": 0.9759430461350306,
"learning_rate": 6.867811648164769e-06,
"loss": 0.0084,
"step": 820
},
{
"epoch": 13.306451612903226,
"grad_norm": 0.5436428472489789,
"learning_rate": 6.824209189333082e-06,
"loss": 0.0059,
"step": 825
},
{
"epoch": 13.387096774193548,
"grad_norm": 0.8064226300726538,
"learning_rate": 6.780446110250766e-06,
"loss": 0.0083,
"step": 830
},
{
"epoch": 13.46774193548387,
"grad_norm": 0.6692079180896646,
"learning_rate": 6.736526264224101e-06,
"loss": 0.0069,
"step": 835
},
{
"epoch": 13.548387096774194,
"grad_norm": 0.7920667479327235,
"learning_rate": 6.692453518362587e-06,
"loss": 0.0087,
"step": 840
},
{
"epoch": 13.629032258064516,
"grad_norm": 0.9743408271346984,
"learning_rate": 6.648231753238431e-06,
"loss": 0.0108,
"step": 845
},
{
"epoch": 13.709677419354838,
"grad_norm": 1.054329395592257,
"learning_rate": 6.603864862544879e-06,
"loss": 0.008,
"step": 850
},
{
"epoch": 13.790322580645162,
"grad_norm": 0.6890711609162895,
"learning_rate": 6.5593567527533715e-06,
"loss": 0.007,
"step": 855
},
{
"epoch": 13.870967741935484,
"grad_norm": 0.8929770220878291,
"learning_rate": 6.514711342769588e-06,
"loss": 0.0094,
"step": 860
},
{
"epoch": 13.951612903225806,
"grad_norm": 0.8470688740181095,
"learning_rate": 6.469932563588386e-06,
"loss": 0.0074,
"step": 865
},
{
"epoch": 14.03225806451613,
"grad_norm": 0.929920833432491,
"learning_rate": 6.425024357947677e-06,
"loss": 0.0081,
"step": 870
},
{
"epoch": 14.112903225806452,
"grad_norm": 0.5368583000455479,
"learning_rate": 6.3799906799812805e-06,
"loss": 0.0039,
"step": 875
},
{
"epoch": 14.193548387096774,
"grad_norm": 0.5542449665774428,
"learning_rate": 6.334835494870759e-06,
"loss": 0.0058,
"step": 880
},
{
"epoch": 14.274193548387096,
"grad_norm": 1.001191478848351,
"learning_rate": 6.289562778496285e-06,
"loss": 0.0058,
"step": 885
},
{
"epoch": 14.35483870967742,
"grad_norm": 0.4192339189229186,
"learning_rate": 6.244176517086573e-06,
"loss": 0.0033,
"step": 890
},
{
"epoch": 14.435483870967742,
"grad_norm": 0.5368429196948196,
"learning_rate": 6.1986807068678926e-06,
"loss": 0.0052,
"step": 895
},
{
"epoch": 14.516129032258064,
"grad_norm": 0.9333482099983527,
"learning_rate": 6.153079353712201e-06,
"loss": 0.0057,
"step": 900
},
{
"epoch": 14.596774193548388,
"grad_norm": 1.0750824183015748,
"learning_rate": 6.107376472784438e-06,
"loss": 0.0064,
"step": 905
},
{
"epoch": 14.67741935483871,
"grad_norm": 0.46525235985013963,
"learning_rate": 6.061576088188981e-06,
"loss": 0.0039,
"step": 910
},
{
"epoch": 14.758064516129032,
"grad_norm": 0.7126091866734285,
"learning_rate": 6.015682232615336e-06,
"loss": 0.0064,
"step": 915
},
{
"epoch": 14.838709677419354,
"grad_norm": 0.5389766142133813,
"learning_rate": 5.969698946983055e-06,
"loss": 0.0046,
"step": 920
},
{
"epoch": 14.919354838709678,
"grad_norm": 0.7912398368360793,
"learning_rate": 5.923630280085948e-06,
"loss": 0.0066,
"step": 925
},
{
"epoch": 15.0,
"grad_norm": 0.3365541360781848,
"learning_rate": 5.877480288235569e-06,
"loss": 0.0041,
"step": 930
}
],
"logging_steps": 5,
"max_steps": 1860,
"num_input_tokens_seen": 0,
"num_train_epochs": 30,
"save_steps": 310,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": false
},
"attributes": {}
}
},
"total_flos": 17705948282880.0,
"train_batch_size": 2,
"trial_name": null,
"trial_params": null
}