{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 15.0,
  "eval_steps": 500,
  "global_step": 930,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.08064516129032258,
      "grad_norm": 8.011420401939231,
      "learning_rate": 4.301075268817205e-07,
      "loss": 0.7742,
      "step": 5
    },
    {
      "epoch": 0.16129032258064516,
      "grad_norm": 10.047350021332239,
      "learning_rate": 9.67741935483871e-07,
      "loss": 0.7435,
      "step": 10
    },
    {
      "epoch": 0.24193548387096775,
      "grad_norm": 5.168316776135969,
      "learning_rate": 1.5053763440860217e-06,
      "loss": 0.6269,
      "step": 15
    },
    {
      "epoch": 0.3225806451612903,
      "grad_norm": 4.938634967365022,
      "learning_rate": 2.043010752688172e-06,
      "loss": 0.5267,
      "step": 20
    },
    {
      "epoch": 0.4032258064516129,
      "grad_norm": 3.953470980651631,
      "learning_rate": 2.580645161290323e-06,
      "loss": 0.4971,
      "step": 25
    },
    {
      "epoch": 0.4838709677419355,
      "grad_norm": 3.546097308432004,
      "learning_rate": 3.1182795698924735e-06,
      "loss": 0.4602,
      "step": 30
    },
    {
      "epoch": 0.5645161290322581,
      "grad_norm": 3.227669401286306,
      "learning_rate": 3.655913978494624e-06,
      "loss": 0.4054,
      "step": 35
    },
    {
      "epoch": 0.6451612903225806,
      "grad_norm": 3.3339396869053233,
      "learning_rate": 4.193548387096774e-06,
      "loss": 0.3888,
      "step": 40
    },
    {
      "epoch": 0.7258064516129032,
      "grad_norm": 3.4215451938979196,
      "learning_rate": 4.731182795698925e-06,
      "loss": 0.3594,
      "step": 45
    },
    {
      "epoch": 0.8064516129032258,
      "grad_norm": 3.387868343141131,
      "learning_rate": 5.268817204301076e-06,
      "loss": 0.4685,
      "step": 50
    },
    {
      "epoch": 0.8870967741935484,
      "grad_norm": 3.2480841921521577,
      "learning_rate": 5.806451612903226e-06,
      "loss": 0.3939,
      "step": 55
    },
    {
      "epoch": 0.967741935483871,
      "grad_norm": 2.8836899888015326,
      "learning_rate": 6.344086021505377e-06,
      "loss": 0.412,
      "step": 60
    },
    {
      "epoch": 1.0483870967741935,
      "grad_norm": 2.7064083197919104,
      "learning_rate": 6.881720430107528e-06,
      "loss": 0.2812,
      "step": 65
    },
    {
      "epoch": 1.129032258064516,
      "grad_norm": 3.7213743894015754,
      "learning_rate": 7.4193548387096784e-06,
      "loss": 0.2671,
      "step": 70
    },
    {
      "epoch": 1.2096774193548387,
      "grad_norm": 2.6772868092907207,
      "learning_rate": 7.956989247311828e-06,
      "loss": 0.2735,
      "step": 75
    },
    {
      "epoch": 1.2903225806451613,
      "grad_norm": 3.4624454069403523,
      "learning_rate": 8.494623655913979e-06,
      "loss": 0.2921,
      "step": 80
    },
    {
      "epoch": 1.370967741935484,
      "grad_norm": 3.12986197308312,
      "learning_rate": 9.03225806451613e-06,
      "loss": 0.2797,
      "step": 85
    },
    {
      "epoch": 1.4516129032258065,
      "grad_norm": 2.66480640353227,
      "learning_rate": 9.56989247311828e-06,
      "loss": 0.2614,
      "step": 90
    },
    {
      "epoch": 1.532258064516129,
      "grad_norm": 3.0702920459796323,
      "learning_rate": 1.0107526881720431e-05,
      "loss": 0.3293,
      "step": 95
    },
    {
      "epoch": 1.6129032258064515,
      "grad_norm": 2.929888675419076,
      "learning_rate": 1.0645161290322582e-05,
      "loss": 0.3314,
      "step": 100
    },
    {
      "epoch": 1.6935483870967742,
      "grad_norm": 3.3443392917555164,
      "learning_rate": 1.118279569892473e-05,
      "loss": 0.3273,
      "step": 105
    },
    {
      "epoch": 1.7741935483870968,
      "grad_norm": 2.746276842396526,
      "learning_rate": 1.1720430107526883e-05,
      "loss": 0.279,
      "step": 110
    },
    {
      "epoch": 1.8548387096774195,
      "grad_norm": 2.704379170662015,
      "learning_rate": 1.2258064516129034e-05,
      "loss": 0.2994,
      "step": 115
    },
    {
      "epoch": 1.935483870967742,
      "grad_norm": 2.4185003231715383,
      "learning_rate": 1.2795698924731184e-05,
      "loss": 0.3372,
      "step": 120
    },
    {
      "epoch": 2.0161290322580645,
      "grad_norm": 2.3874071062723043,
      "learning_rate": 1.3333333333333333e-05,
      "loss": 0.253,
      "step": 125
    },
    {
      "epoch": 2.096774193548387,
      "grad_norm": 4.1364120455442315,
      "learning_rate": 1.3870967741935486e-05,
      "loss": 0.1688,
      "step": 130
    },
    {
      "epoch": 2.1774193548387095,
      "grad_norm": 2.945339681148867,
      "learning_rate": 1.4408602150537636e-05,
      "loss": 0.1823,
      "step": 135
    },
    {
      "epoch": 2.258064516129032,
      "grad_norm": 2.849277863512327,
      "learning_rate": 1.4946236559139787e-05,
      "loss": 0.1695,
      "step": 140
    },
    {
      "epoch": 2.338709677419355,
      "grad_norm": 2.8089693447606168,
      "learning_rate": 1.5483870967741936e-05,
      "loss": 0.2041,
      "step": 145
    },
    {
      "epoch": 2.4193548387096775,
      "grad_norm": 2.7875058544898175,
      "learning_rate": 1.6021505376344087e-05,
      "loss": 0.2015,
      "step": 150
    },
    {
      "epoch": 2.5,
      "grad_norm": 2.9140451817511086,
      "learning_rate": 1.6559139784946237e-05,
      "loss": 0.2188,
      "step": 155
    },
    {
      "epoch": 2.5806451612903225,
      "grad_norm": 2.4255786527564642,
      "learning_rate": 1.7096774193548388e-05,
      "loss": 0.2127,
      "step": 160
    },
    {
      "epoch": 2.661290322580645,
      "grad_norm": 2.3925352807787577,
      "learning_rate": 1.763440860215054e-05,
      "loss": 0.212,
      "step": 165
    },
    {
      "epoch": 2.741935483870968,
      "grad_norm": 2.734010191526564,
      "learning_rate": 1.817204301075269e-05,
      "loss": 0.2057,
      "step": 170
    },
    {
      "epoch": 2.8225806451612905,
      "grad_norm": 2.5495288446706583,
      "learning_rate": 1.870967741935484e-05,
      "loss": 0.2475,
      "step": 175
    },
    {
      "epoch": 2.903225806451613,
      "grad_norm": 3.0351828310456996,
      "learning_rate": 1.924731182795699e-05,
      "loss": 0.253,
      "step": 180
    },
    {
      "epoch": 2.9838709677419355,
      "grad_norm": 2.676416201274647,
      "learning_rate": 1.978494623655914e-05,
      "loss": 0.2596,
      "step": 185
    },
    {
      "epoch": 3.064516129032258,
      "grad_norm": 2.370849336507847,
      "learning_rate": 1.9999841510606067e-05,
      "loss": 0.1421,
      "step": 190
    },
    {
      "epoch": 3.1451612903225805,
      "grad_norm": 2.7377960125859993,
      "learning_rate": 1.999887298250285e-05,
      "loss": 0.1712,
      "step": 195
    },
    {
      "epoch": 3.225806451612903,
      "grad_norm": 2.820149350176967,
      "learning_rate": 1.9997024061134506e-05,
      "loss": 0.1555,
      "step": 200
    },
    {
      "epoch": 3.306451612903226,
      "grad_norm": 2.1128027512370293,
      "learning_rate": 1.999429490929718e-05,
      "loss": 0.152,
      "step": 205
    },
    {
      "epoch": 3.3870967741935485,
      "grad_norm": 2.1099865691307538,
      "learning_rate": 1.999068576729065e-05,
      "loss": 0.1657,
      "step": 210
    },
    {
      "epoch": 3.467741935483871,
      "grad_norm": 2.547789552935568,
      "learning_rate": 1.9986196952897152e-05,
      "loss": 0.1693,
      "step": 215
    },
    {
      "epoch": 3.5483870967741935,
      "grad_norm": 2.198776749855706,
      "learning_rate": 1.9980828861353432e-05,
      "loss": 0.1689,
      "step": 220
    },
    {
      "epoch": 3.629032258064516,
      "grad_norm": 2.3385638015508503,
      "learning_rate": 1.9974581965315923e-05,
      "loss": 0.2257,
      "step": 225
    },
    {
      "epoch": 3.709677419354839,
      "grad_norm": 2.1989820629741934,
      "learning_rate": 1.996745681481913e-05,
      "loss": 0.154,
      "step": 230
    },
    {
      "epoch": 3.790322580645161,
      "grad_norm": 2.814460829579746,
      "learning_rate": 1.9959454037227215e-05,
      "loss": 0.162,
      "step": 235
    },
    {
      "epoch": 3.870967741935484,
      "grad_norm": 2.068359313330162,
      "learning_rate": 1.9950574337178738e-05,
      "loss": 0.177,
      "step": 240
    },
    {
      "epoch": 3.9516129032258065,
      "grad_norm": 2.7059184532156864,
      "learning_rate": 1.994081849652463e-05,
      "loss": 0.2009,
      "step": 245
    },
    {
      "epoch": 4.032258064516129,
      "grad_norm": 1.6009222694764955,
      "learning_rate": 1.9930187374259338e-05,
      "loss": 0.1212,
      "step": 250
    },
    {
      "epoch": 4.112903225806452,
      "grad_norm": 2.0703755307458933,
      "learning_rate": 1.9918681906445205e-05,
      "loss": 0.0961,
      "step": 255
    },
    {
      "epoch": 4.193548387096774,
      "grad_norm": 2.311594788643909,
      "learning_rate": 1.9906303106130038e-05,
      "loss": 0.1417,
      "step": 260
    },
    {
      "epoch": 4.274193548387097,
      "grad_norm": 1.8666211061978775,
      "learning_rate": 1.989305206325792e-05,
      "loss": 0.1131,
      "step": 265
    },
    {
      "epoch": 4.354838709677419,
      "grad_norm": 1.9890820794130148,
      "learning_rate": 1.9878929944573236e-05,
      "loss": 0.1206,
      "step": 270
    },
    {
      "epoch": 4.435483870967742,
      "grad_norm": 1.7249409190839584,
      "learning_rate": 1.9863937993517943e-05,
      "loss": 0.1079,
      "step": 275
    },
    {
      "epoch": 4.516129032258064,
      "grad_norm": 1.9528407771403076,
      "learning_rate": 1.9848077530122083e-05,
      "loss": 0.1014,
      "step": 280
    },
    {
      "epoch": 4.596774193548387,
      "grad_norm": 2.1527829290500535,
      "learning_rate": 1.9831349950887557e-05,
      "loss": 0.1037,
      "step": 285
    },
    {
      "epoch": 4.67741935483871,
      "grad_norm": 1.623780546041927,
      "learning_rate": 1.981375672866517e-05,
      "loss": 0.1175,
      "step": 290
    },
    {
      "epoch": 4.758064516129032,
      "grad_norm": 1.7998585089781174,
      "learning_rate": 1.9795299412524948e-05,
      "loss": 0.1037,
      "step": 295
    },
    {
      "epoch": 4.838709677419355,
      "grad_norm": 2.256914320283658,
      "learning_rate": 1.977597962761972e-05,
      "loss": 0.1359,
      "step": 300
    },
    {
      "epoch": 4.919354838709677,
      "grad_norm": 1.5695323201796525,
      "learning_rate": 1.9755799075042056e-05,
      "loss": 0.1377,
      "step": 305
    },
    {
      "epoch": 5.0,
      "grad_norm": 1.8509458520725823,
      "learning_rate": 1.9734759531674474e-05,
      "loss": 0.1274,
      "step": 310
    },
    {
      "epoch": 5.080645161290323,
      "grad_norm": 2.1385239357831174,
      "learning_rate": 1.971286285003298e-05,
      "loss": 0.066,
      "step": 315
    },
    {
      "epoch": 5.161290322580645,
      "grad_norm": 1.9663061853058708,
      "learning_rate": 1.969011095810397e-05,
      "loss": 0.0793,
      "step": 320
    },
    {
      "epoch": 5.241935483870968,
      "grad_norm": 1.8929709744757282,
      "learning_rate": 1.9666505859174462e-05,
      "loss": 0.0786,
      "step": 325
    },
    {
      "epoch": 5.32258064516129,
      "grad_norm": 1.6063115339944203,
      "learning_rate": 1.9642049631655708e-05,
      "loss": 0.0667,
      "step": 330
    },
    {
      "epoch": 5.403225806451613,
      "grad_norm": 1.6163876431750075,
      "learning_rate": 1.9616744428900186e-05,
      "loss": 0.0776,
      "step": 335
    },
    {
      "epoch": 5.483870967741936,
      "grad_norm": 1.7968306100636957,
      "learning_rate": 1.9590592479012022e-05,
      "loss": 0.0751,
      "step": 340
    },
    {
      "epoch": 5.564516129032258,
      "grad_norm": 1.9209533257498246,
      "learning_rate": 1.9563596084650785e-05,
      "loss": 0.096,
      "step": 345
    },
    {
      "epoch": 5.645161290322581,
      "grad_norm": 1.4487068403130905,
      "learning_rate": 1.9535757622828746e-05,
      "loss": 0.0889,
      "step": 350
    },
    {
      "epoch": 5.725806451612903,
      "grad_norm": 1.3952921694858906,
      "learning_rate": 1.9507079544701583e-05,
      "loss": 0.0732,
      "step": 355
    },
    {
      "epoch": 5.806451612903226,
      "grad_norm": 1.276828494747591,
      "learning_rate": 1.9477564375352564e-05,
      "loss": 0.0813,
      "step": 360
    },
    {
      "epoch": 5.887096774193548,
      "grad_norm": 1.8732979205600613,
      "learning_rate": 1.9447214713570205e-05,
      "loss": 0.0803,
      "step": 365
    },
    {
      "epoch": 5.967741935483871,
      "grad_norm": 1.7001388882225135,
      "learning_rate": 1.941603323161946e-05,
      "loss": 0.0837,
      "step": 370
    },
    {
      "epoch": 6.048387096774194,
      "grad_norm": 1.126311969801114,
      "learning_rate": 1.9384022675006423e-05,
      "loss": 0.0543,
      "step": 375
    },
    {
      "epoch": 6.129032258064516,
      "grad_norm": 1.5736297709731053,
      "learning_rate": 1.9351185862236587e-05,
      "loss": 0.0545,
      "step": 380
    },
    {
      "epoch": 6.209677419354839,
      "grad_norm": 1.700997882047633,
      "learning_rate": 1.9317525684566686e-05,
      "loss": 0.0649,
      "step": 385
    },
    {
      "epoch": 6.290322580645161,
      "grad_norm": 1.2491520103424365,
      "learning_rate": 1.928304510575011e-05,
      "loss": 0.0517,
      "step": 390
    },
    {
      "epoch": 6.370967741935484,
      "grad_norm": 1.9927911356159098,
      "learning_rate": 1.924774716177596e-05,
      "loss": 0.0575,
      "step": 395
    },
    {
      "epoch": 6.451612903225806,
      "grad_norm": 1.405928829545799,
      "learning_rate": 1.9211634960601726e-05,
      "loss": 0.0656,
      "step": 400
    },
    {
      "epoch": 6.532258064516129,
      "grad_norm": 1.5977023675995665,
      "learning_rate": 1.9174711681879627e-05,
      "loss": 0.0599,
      "step": 405
    },
    {
      "epoch": 6.612903225806452,
      "grad_norm": 1.4828352445884125,
      "learning_rate": 1.9136980576676648e-05,
      "loss": 0.0501,
      "step": 410
    },
    {
      "epoch": 6.693548387096774,
      "grad_norm": 1.3445540260231166,
      "learning_rate": 1.9098444967188308e-05,
      "loss": 0.0686,
      "step": 415
    },
    {
      "epoch": 6.774193548387097,
      "grad_norm": 1.3685484393569762,
      "learning_rate": 1.9059108246446107e-05,
      "loss": 0.0549,
      "step": 420
    },
    {
      "epoch": 6.854838709677419,
      "grad_norm": 1.504608696343651,
      "learning_rate": 1.90189738780188e-05,
      "loss": 0.0521,
      "step": 425
    },
    {
      "epoch": 6.935483870967742,
      "grad_norm": 2.3124554784826543,
      "learning_rate": 1.897804539570742e-05,
      "loss": 0.0618,
      "step": 430
    },
    {
      "epoch": 7.016129032258065,
      "grad_norm": 1.0101556069317463,
      "learning_rate": 1.8936326403234125e-05,
      "loss": 0.0594,
      "step": 435
    },
    {
      "epoch": 7.096774193548387,
      "grad_norm": 1.3413145646882874,
      "learning_rate": 1.8893820573924907e-05,
      "loss": 0.0395,
      "step": 440
    },
    {
      "epoch": 7.17741935483871,
      "grad_norm": 1.3557891650154248,
      "learning_rate": 1.8850531650386154e-05,
      "loss": 0.0423,
      "step": 445
    },
    {
      "epoch": 7.258064516129032,
      "grad_norm": 1.3407092707020813,
      "learning_rate": 1.8806463444175108e-05,
      "loss": 0.0496,
      "step": 450
    },
    {
      "epoch": 7.338709677419355,
      "grad_norm": 1.3060068492508308,
      "learning_rate": 1.8761619835464265e-05,
      "loss": 0.0404,
      "step": 455
    },
    {
      "epoch": 7.419354838709677,
      "grad_norm": 1.0175150605179055,
      "learning_rate": 1.8716004772699724e-05,
      "loss": 0.0428,
      "step": 460
    },
    {
      "epoch": 7.5,
      "grad_norm": 1.3413688042908085,
      "learning_rate": 1.8669622272253555e-05,
      "loss": 0.0416,
      "step": 465
    },
    {
      "epoch": 7.580645161290323,
      "grad_norm": 1.153505595164974,
      "learning_rate": 1.862247641807012e-05,
      "loss": 0.0579,
      "step": 470
    },
    {
      "epoch": 7.661290322580645,
      "grad_norm": 1.2354313686927156,
      "learning_rate": 1.857457136130651e-05,
      "loss": 0.0481,
      "step": 475
    },
    {
      "epoch": 7.741935483870968,
      "grad_norm": 1.0931719489898546,
      "learning_rate": 1.8525911319967043e-05,
      "loss": 0.0486,
      "step": 480
    },
    {
      "epoch": 7.82258064516129,
      "grad_norm": 1.5940570451158567,
      "learning_rate": 1.8476500578531843e-05,
      "loss": 0.0451,
      "step": 485
    },
    {
      "epoch": 7.903225806451613,
      "grad_norm": 1.1732082302822824,
      "learning_rate": 1.842634348757964e-05,
      "loss": 0.043,
      "step": 490
    },
    {
      "epoch": 7.983870967741936,
      "grad_norm": 1.4772573693475428,
      "learning_rate": 1.837544446340465e-05,
      "loss": 0.0528,
      "step": 495
    },
    {
      "epoch": 8.064516129032258,
      "grad_norm": 0.8955083646170174,
      "learning_rate": 1.8323807987627784e-05,
      "loss": 0.0334,
      "step": 500
    },
    {
      "epoch": 8.14516129032258,
      "grad_norm": 0.9631422107720344,
      "learning_rate": 1.827143860680199e-05,
      "loss": 0.0291,
      "step": 505
    },
    {
      "epoch": 8.225806451612904,
      "grad_norm": 1.2099608078687119,
      "learning_rate": 1.821834093201196e-05,
      "loss": 0.0337,
      "step": 510
    },
    {
      "epoch": 8.306451612903226,
      "grad_norm": 1.118942530993604,
      "learning_rate": 1.8164519638468127e-05,
      "loss": 0.0311,
      "step": 515
    },
    {
      "epoch": 8.387096774193548,
      "grad_norm": 1.107506450946018,
      "learning_rate": 1.8109979465095014e-05,
      "loss": 0.0285,
      "step": 520
    },
    {
      "epoch": 8.46774193548387,
      "grad_norm": 1.107697878261832,
      "learning_rate": 1.805472521411397e-05,
      "loss": 0.033,
      "step": 525
    },
    {
      "epoch": 8.548387096774194,
      "grad_norm": 1.1679751114840105,
      "learning_rate": 1.799876175062035e-05,
      "loss": 0.0362,
      "step": 530
    },
    {
      "epoch": 8.629032258064516,
      "grad_norm": 0.9679470036614197,
      "learning_rate": 1.7942094002155122e-05,
      "loss": 0.0357,
      "step": 535
    },
    {
      "epoch": 8.709677419354838,
      "grad_norm": 1.1615481659249676,
      "learning_rate": 1.7884726958271033e-05,
      "loss": 0.036,
      "step": 540
    },
    {
      "epoch": 8.790322580645162,
      "grad_norm": 1.3403691682577592,
      "learning_rate": 1.7826665670093258e-05,
      "loss": 0.0332,
      "step": 545
    },
    {
      "epoch": 8.870967741935484,
      "grad_norm": 1.123563696070656,
      "learning_rate": 1.7767915249874666e-05,
      "loss": 0.0375,
      "step": 550
    },
    {
      "epoch": 8.951612903225806,
      "grad_norm": 1.2116386009188533,
      "learning_rate": 1.7708480870545684e-05,
      "loss": 0.044,
      "step": 555
    },
    {
      "epoch": 9.03225806451613,
      "grad_norm": 0.7979141814706628,
      "learning_rate": 1.7648367765258823e-05,
      "loss": 0.0315,
      "step": 560
    },
    {
      "epoch": 9.112903225806452,
      "grad_norm": 2.4300360668926886,
      "learning_rate": 1.758758122692791e-05,
      "loss": 0.0321,
      "step": 565
    },
    {
      "epoch": 9.193548387096774,
      "grad_norm": 1.052541564410572,
      "learning_rate": 1.7526126607762043e-05,
      "loss": 0.0314,
      "step": 570
    },
    {
      "epoch": 9.274193548387096,
      "grad_norm": 0.9024632271562477,
      "learning_rate": 1.746400931879434e-05,
      "loss": 0.0296,
      "step": 575
    },
    {
      "epoch": 9.35483870967742,
      "grad_norm": 1.0547389461651557,
      "learning_rate": 1.7401234829405492e-05,
      "loss": 0.0264,
      "step": 580
    },
    {
      "epoch": 9.435483870967742,
      "grad_norm": 1.1560142054705027,
      "learning_rate": 1.7337808666842196e-05,
      "loss": 0.0256,
      "step": 585
    },
    {
      "epoch": 9.516129032258064,
      "grad_norm": 1.1516296207125127,
      "learning_rate": 1.7273736415730488e-05,
      "loss": 0.0355,
      "step": 590
    },
    {
      "epoch": 9.596774193548388,
      "grad_norm": 0.9268725755353335,
      "learning_rate": 1.7209023717584013e-05,
      "loss": 0.0265,
      "step": 595
    },
    {
      "epoch": 9.67741935483871,
      "grad_norm": 1.0873005455780382,
      "learning_rate": 1.714367627030729e-05,
      "loss": 0.0344,
      "step": 600
    },
    {
      "epoch": 9.758064516129032,
      "grad_norm": 0.9095844635646306,
      "learning_rate": 1.7077699827694038e-05,
      "loss": 0.0362,
      "step": 605
    },
    {
      "epoch": 9.838709677419354,
      "grad_norm": 1.2515206515268222,
      "learning_rate": 1.7011100198920528e-05,
      "loss": 0.0297,
      "step": 610
    },
    {
      "epoch": 9.919354838709678,
      "grad_norm": 1.0327741781324877,
      "learning_rate": 1.6943883248034116e-05,
      "loss": 0.028,
      "step": 615
    },
    {
      "epoch": 10.0,
      "grad_norm": 0.9826424300809284,
      "learning_rate": 1.68760548934369e-05,
      "loss": 0.0327,
      "step": 620
    },
    {
      "epoch": 10.080645161290322,
      "grad_norm": 0.9383755847813475,
      "learning_rate": 1.6807621107364613e-05,
      "loss": 0.0207,
      "step": 625
    },
    {
      "epoch": 10.161290322580646,
      "grad_norm": 0.9662163918019707,
      "learning_rate": 1.6738587915360795e-05,
      "loss": 0.0193,
      "step": 630
    },
    {
      "epoch": 10.241935483870968,
      "grad_norm": 0.8536385882747535,
      "learning_rate": 1.6668961395746203e-05,
      "loss": 0.0184,
      "step": 635
    },
    {
      "epoch": 10.32258064516129,
      "grad_norm": 0.788293552656787,
      "learning_rate": 1.6598747679083657e-05,
      "loss": 0.0303,
      "step": 640
    },
    {
      "epoch": 10.403225806451612,
      "grad_norm": 0.9845898875068094,
      "learning_rate": 1.652795294763824e-05,
      "loss": 0.0216,
      "step": 645
    },
    {
      "epoch": 10.483870967741936,
      "grad_norm": 0.6291325552961899,
      "learning_rate": 1.6456583434832943e-05,
      "loss": 0.021,
      "step": 650
    },
    {
      "epoch": 10.564516129032258,
      "grad_norm": 0.8801054632267955,
      "learning_rate": 1.6384645424699835e-05,
      "loss": 0.0245,
      "step": 655
    },
    {
      "epoch": 10.64516129032258,
      "grad_norm": 0.9368371046853895,
      "learning_rate": 1.631214525132674e-05,
      "loss": 0.0199,
      "step": 660
    },
    {
      "epoch": 10.725806451612904,
      "grad_norm": 1.6268394900105974,
      "learning_rate": 1.6239089298299524e-05,
      "loss": 0.0276,
      "step": 665
    },
    {
      "epoch": 10.806451612903226,
      "grad_norm": 0.999434975658911,
      "learning_rate": 1.616548399814006e-05,
      "loss": 0.0232,
      "step": 670
    },
    {
      "epoch": 10.887096774193548,
      "grad_norm": 0.7559937092929928,
      "learning_rate": 1.60913358317398e-05,
      "loss": 0.0216,
      "step": 675
    },
    {
      "epoch": 10.967741935483872,
      "grad_norm": 0.9110927404449887,
      "learning_rate": 1.6016651327789172e-05,
      "loss": 0.023,
      "step": 680
    },
    {
      "epoch": 11.048387096774194,
      "grad_norm": 0.7820630863655826,
      "learning_rate": 1.594143706220273e-05,
      "loss": 0.0175,
      "step": 685
    },
    {
      "epoch": 11.129032258064516,
      "grad_norm": 0.8615839081674891,
      "learning_rate": 1.5865699657540124e-05,
      "loss": 0.0179,
      "step": 690
    },
    {
      "epoch": 11.209677419354838,
      "grad_norm": 0.7485778670469995,
      "learning_rate": 1.578944578242302e-05,
      "loss": 0.0165,
      "step": 695
    },
    {
      "epoch": 11.290322580645162,
      "grad_norm": 0.5653864836565881,
      "learning_rate": 1.5712682150947926e-05,
      "loss": 0.0138,
      "step": 700
    },
    {
      "epoch": 11.370967741935484,
      "grad_norm": 1.0152034094965365,
      "learning_rate": 1.5635415522094995e-05,
      "loss": 0.0213,
      "step": 705
    },
    {
      "epoch": 11.451612903225806,
      "grad_norm": 0.7290881851961156,
      "learning_rate": 1.555765269913294e-05,
      "loss": 0.0155,
      "step": 710
    },
    {
      "epoch": 11.532258064516128,
      "grad_norm": 0.9439936389696655,
      "learning_rate": 1.5479400529019987e-05,
      "loss": 0.0144,
      "step": 715
    },
    {
      "epoch": 11.612903225806452,
      "grad_norm": 0.7958692168661642,
      "learning_rate": 1.5400665901801007e-05,
      "loss": 0.018,
      "step": 720
    },
    {
      "epoch": 11.693548387096774,
      "grad_norm": 0.7223477960561964,
      "learning_rate": 1.5321455750000864e-05,
      "loss": 0.0135,
      "step": 725
    },
    {
      "epoch": 11.774193548387096,
      "grad_norm": 0.6593842682896188,
      "learning_rate": 1.5241777048013999e-05,
      "loss": 0.0148,
      "step": 730
    },
    {
      "epoch": 11.85483870967742,
      "grad_norm": 0.8453520234181606,
      "learning_rate": 1.5161636811490353e-05,
      "loss": 0.0153,
      "step": 735
    },
    {
      "epoch": 11.935483870967742,
      "grad_norm": 0.8025417174419947,
      "learning_rate": 1.5081042096717629e-05,
      "loss": 0.0164,
      "step": 740
    },
    {
      "epoch": 12.016129032258064,
      "grad_norm": 0.6831031548209219,
      "learning_rate": 1.5000000000000002e-05,
      "loss": 0.0148,
      "step": 745
    },
    {
      "epoch": 12.096774193548388,
      "grad_norm": 0.7608254514246531,
      "learning_rate": 1.491851765703329e-05,
      "loss": 0.012,
      "step": 750
    },
    {
      "epoch": 12.17741935483871,
      "grad_norm": 0.5136759705172103,
      "learning_rate": 1.483660224227667e-05,
      "loss": 0.0068,
      "step": 755
    },
    {
      "epoch": 12.258064516129032,
      "grad_norm": 0.636212407574805,
      "learning_rate": 1.4754260968320953e-05,
      "loss": 0.0127,
      "step": 760
    },
    {
      "epoch": 12.338709677419354,
      "grad_norm": 0.44635685167884065,
      "learning_rate": 1.4671501085253544e-05,
      "loss": 0.0119,
      "step": 765
    },
    {
      "epoch": 12.419354838709678,
      "grad_norm": 0.48363924785074075,
      "learning_rate": 1.4588329880020063e-05,
      "loss": 0.0103,
      "step": 770
    },
    {
      "epoch": 12.5,
      "grad_norm": 0.5439281715798179,
      "learning_rate": 1.4504754675782731e-05,
      "loss": 0.0128,
      "step": 775
    },
    {
      "epoch": 12.580645161290322,
      "grad_norm": 0.5354009938961187,
      "learning_rate": 1.4420782831275593e-05,
      "loss": 0.0116,
      "step": 780
    },
    {
      "epoch": 12.661290322580646,
      "grad_norm": 0.6688889724203744,
      "learning_rate": 1.4336421740156554e-05,
      "loss": 0.0077,
      "step": 785
    },
    {
      "epoch": 12.741935483870968,
      "grad_norm": 0.6812534753778359,
      "learning_rate": 1.4251678830356408e-05,
      "loss": 0.0103,
      "step": 790
    },
    {
      "epoch": 12.82258064516129,
      "grad_norm": 0.6501175433498573,
      "learning_rate": 1.4166561563424787e-05,
      "loss": 0.0112,
      "step": 795
    },
    {
      "epoch": 12.903225806451612,
      "grad_norm": 0.650751022479761,
      "learning_rate": 1.4081077433873193e-05,
      "loss": 0.0113,
      "step": 800
    },
    {
      "epoch": 12.983870967741936,
      "grad_norm": 0.7114897848206225,
      "learning_rate": 1.3995233968515105e-05,
      "loss": 0.0089,
      "step": 805
    },
    {
      "epoch": 13.064516129032258,
      "grad_norm": 0.6357652159202873,
      "learning_rate": 1.3909038725803243e-05,
      "loss": 0.0062,
      "step": 810
    },
    {
      "epoch": 13.14516129032258,
      "grad_norm": 0.5540274340711291,
      "learning_rate": 1.3822499295164073e-05,
      "loss": 0.0079,
      "step": 815
    },
    {
      "epoch": 13.225806451612904,
      "grad_norm": 0.42600296520707526,
      "learning_rate": 1.3735623296329537e-05,
      "loss": 0.0067,
      "step": 820
    },
    {
      "epoch": 13.306451612903226,
      "grad_norm": 0.45535092056177595,
      "learning_rate": 1.3648418378666164e-05,
      "loss": 0.0075,
      "step": 825
    },
    {
      "epoch": 13.387096774193548,
      "grad_norm": 0.5270286975326697,
      "learning_rate": 1.3560892220501532e-05,
      "loss": 0.0057,
      "step": 830
    },
    {
      "epoch": 13.46774193548387,
      "grad_norm": 0.3997583004602451,
      "learning_rate": 1.3473052528448203e-05,
      "loss": 0.005,
      "step": 835
    },
    {
      "epoch": 13.548387096774194,
      "grad_norm": 0.4555926450092979,
      "learning_rate": 1.3384907036725174e-05,
      "loss": 0.0061,
      "step": 840
    },
    {
      "epoch": 13.629032258064516,
      "grad_norm": 0.5495172127346515,
      "learning_rate": 1.3296463506476862e-05,
      "loss": 0.0063,
      "step": 845
    },
    {
      "epoch": 13.709677419354838,
      "grad_norm": 0.25674804312426736,
      "learning_rate": 1.3207729725089757e-05,
      "loss": 0.0056,
      "step": 850
    },
    {
      "epoch": 13.790322580645162,
      "grad_norm": 0.4562073850046896,
      "learning_rate": 1.3118713505506743e-05,
      "loss": 0.005,
      "step": 855
    },
    {
      "epoch": 13.870967741935484,
      "grad_norm": 0.6381309636636849,
      "learning_rate": 1.3029422685539176e-05,
      "loss": 0.0091,
      "step": 860
    },
    {
      "epoch": 13.951612903225806,
      "grad_norm": 0.4675515960872932,
      "learning_rate": 1.2939865127176771e-05,
      "loss": 0.0067,
      "step": 865
    },
    {
      "epoch": 14.03225806451613,
      "grad_norm": 0.48524387355096776,
      "learning_rate": 1.2850048715895354e-05,
      "loss": 0.0053,
      "step": 870
    },
    {
      "epoch": 14.112903225806452,
      "grad_norm": 0.48560164331553235,
      "learning_rate": 1.2759981359962561e-05,
      "loss": 0.0046,
      "step": 875
    },
    {
      "epoch": 14.193548387096774,
      "grad_norm": 0.31174892099656043,
      "learning_rate": 1.2669670989741519e-05,
      "loss": 0.0056,
      "step": 880
    },
    {
      "epoch": 14.274193548387096,
      "grad_norm": 0.36686246972144787,
      "learning_rate": 1.257912555699257e-05,
      "loss": 0.0032,
      "step": 885
    },
    {
      "epoch": 14.35483870967742,
      "grad_norm": 0.4026724810879191,
      "learning_rate": 1.2488353034173146e-05,
      "loss": 0.0043,
      "step": 890
    },
    {
      "epoch": 14.435483870967742,
      "grad_norm": 0.5052593922852059,
      "learning_rate": 1.2397361413735785e-05,
      "loss": 0.0035,
      "step": 895
    },
    {
      "epoch": 14.516129032258064,
      "grad_norm": 0.20112599123413197,
      "learning_rate": 1.2306158707424402e-05,
      "loss": 0.0015,
      "step": 900
    },
    {
      "epoch": 14.596774193548388,
      "grad_norm": 0.7258015458821793,
      "learning_rate": 1.2214752945568875e-05,
      "loss": 0.0041,
      "step": 905
    },
    {
      "epoch": 14.67741935483871,
      "grad_norm": 0.5402118374404563,
      "learning_rate": 1.2123152176377962e-05,
      "loss": 0.0027,
      "step": 910
    },
    {
      "epoch": 14.758064516129032,
      "grad_norm": 0.4880587083553651,
      "learning_rate": 1.2031364465230672e-05,
      "loss": 0.0058,
      "step": 915
    },
    {
      "epoch": 14.838709677419354,
      "grad_norm": 0.3108599971494139,
      "learning_rate": 1.193939789396611e-05,
      "loss": 0.0052,
      "step": 920
    },
    {
      "epoch": 14.919354838709678,
      "grad_norm": 0.40475472593096934,
      "learning_rate": 1.1847260560171895e-05,
      "loss": 0.0044,
      "step": 925
    },
    {
      "epoch": 15.0,
      "grad_norm": 0.43803638841417536,
      "learning_rate": 1.1754960576471138e-05,
      "loss": 0.0041,
      "step": 930
    }
  ],
  "logging_steps": 5,
  "max_steps": 1860,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 30,
  "save_steps": 310,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 17705948282880.0,
  "train_batch_size": 2,
  "trial_name": null,
  "trial_params": null
}