{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 30.0,
  "eval_steps": 500,
  "global_step": 1860,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.08064516129032258,
      "grad_norm": 8.011420401939231,
      "learning_rate": 4.301075268817205e-07,
      "loss": 0.7742,
      "step": 5
    },
    {
      "epoch": 0.16129032258064516,
      "grad_norm": 10.047350021332239,
      "learning_rate": 9.67741935483871e-07,
      "loss": 0.7435,
      "step": 10
    },
    {
      "epoch": 0.24193548387096775,
      "grad_norm": 5.168316776135969,
      "learning_rate": 1.5053763440860217e-06,
      "loss": 0.6269,
      "step": 15
    },
    {
      "epoch": 0.3225806451612903,
      "grad_norm": 4.938634967365022,
      "learning_rate": 2.043010752688172e-06,
      "loss": 0.5267,
      "step": 20
    },
    {
      "epoch": 0.4032258064516129,
      "grad_norm": 3.953470980651631,
      "learning_rate": 2.580645161290323e-06,
      "loss": 0.4971,
      "step": 25
    },
    {
      "epoch": 0.4838709677419355,
      "grad_norm": 3.546097308432004,
      "learning_rate": 3.1182795698924735e-06,
      "loss": 0.4602,
      "step": 30
    },
    {
      "epoch": 0.5645161290322581,
      "grad_norm": 3.227669401286306,
      "learning_rate": 3.655913978494624e-06,
      "loss": 0.4054,
      "step": 35
    },
    {
      "epoch": 0.6451612903225806,
      "grad_norm": 3.3339396869053233,
      "learning_rate": 4.193548387096774e-06,
      "loss": 0.3888,
      "step": 40
    },
    {
      "epoch": 0.7258064516129032,
      "grad_norm": 3.4215451938979196,
      "learning_rate": 4.731182795698925e-06,
      "loss": 0.3594,
      "step": 45
    },
    {
      "epoch": 0.8064516129032258,
      "grad_norm": 3.387868343141131,
      "learning_rate": 5.268817204301076e-06,
      "loss": 0.4685,
      "step": 50
    },
    {
      "epoch": 0.8870967741935484,
      "grad_norm": 3.2480841921521577,
      "learning_rate": 5.806451612903226e-06,
      "loss": 0.3939,
      "step": 55
    },
    {
      "epoch": 0.967741935483871,
      "grad_norm": 2.8836899888015326,
      "learning_rate": 6.344086021505377e-06,
      "loss": 0.412,
      "step": 60
    },
    {
      "epoch": 1.0483870967741935,
      "grad_norm": 2.7064083197919104,
      "learning_rate": 6.881720430107528e-06,
      "loss": 0.2812,
      "step": 65
    },
    {
      "epoch": 1.129032258064516,
      "grad_norm": 3.7213743894015754,
      "learning_rate": 7.4193548387096784e-06,
      "loss": 0.2671,
      "step": 70
    },
    {
      "epoch": 1.2096774193548387,
      "grad_norm": 2.6772868092907207,
      "learning_rate": 7.956989247311828e-06,
      "loss": 0.2735,
      "step": 75
    },
    {
      "epoch": 1.2903225806451613,
      "grad_norm": 3.4624454069403523,
      "learning_rate": 8.494623655913979e-06,
      "loss": 0.2921,
      "step": 80
    },
    {
      "epoch": 1.370967741935484,
      "grad_norm": 3.12986197308312,
      "learning_rate": 9.03225806451613e-06,
      "loss": 0.2797,
      "step": 85
    },
    {
      "epoch": 1.4516129032258065,
      "grad_norm": 2.66480640353227,
      "learning_rate": 9.56989247311828e-06,
      "loss": 0.2614,
      "step": 90
    },
    {
      "epoch": 1.532258064516129,
      "grad_norm": 3.0702920459796323,
      "learning_rate": 1.0107526881720431e-05,
      "loss": 0.3293,
      "step": 95
    },
    {
      "epoch": 1.6129032258064515,
      "grad_norm": 2.929888675419076,
      "learning_rate": 1.0645161290322582e-05,
      "loss": 0.3314,
      "step": 100
    },
    {
      "epoch": 1.6935483870967742,
      "grad_norm": 3.3443392917555164,
      "learning_rate": 1.118279569892473e-05,
      "loss": 0.3273,
      "step": 105
    },
    {
      "epoch": 1.7741935483870968,
      "grad_norm": 2.746276842396526,
      "learning_rate": 1.1720430107526883e-05,
      "loss": 0.279,
      "step": 110
    },
    {
      "epoch": 1.8548387096774195,
      "grad_norm": 2.704379170662015,
      "learning_rate": 1.2258064516129034e-05,
      "loss": 0.2994,
      "step": 115
    },
    {
      "epoch": 1.935483870967742,
      "grad_norm": 2.4185003231715383,
      "learning_rate": 1.2795698924731184e-05,
      "loss": 0.3372,
      "step": 120
    },
    {
      "epoch": 2.0161290322580645,
      "grad_norm": 2.3874071062723043,
      "learning_rate": 1.3333333333333333e-05,
      "loss": 0.253,
      "step": 125
    },
    {
      "epoch": 2.096774193548387,
      "grad_norm": 4.1364120455442315,
      "learning_rate": 1.3870967741935486e-05,
      "loss": 0.1688,
      "step": 130
    },
    {
      "epoch": 2.1774193548387095,
      "grad_norm": 2.945339681148867,
      "learning_rate": 1.4408602150537636e-05,
      "loss": 0.1823,
      "step": 135
    },
    {
      "epoch": 2.258064516129032,
      "grad_norm": 2.849277863512327,
      "learning_rate": 1.4946236559139787e-05,
      "loss": 0.1695,
      "step": 140
    },
    {
      "epoch": 2.338709677419355,
      "grad_norm": 2.8089693447606168,
      "learning_rate": 1.5483870967741936e-05,
      "loss": 0.2041,
      "step": 145
    },
    {
      "epoch": 2.4193548387096775,
      "grad_norm": 2.7875058544898175,
      "learning_rate": 1.6021505376344087e-05,
      "loss": 0.2015,
      "step": 150
    },
    {
      "epoch": 2.5,
      "grad_norm": 2.9140451817511086,
      "learning_rate": 1.6559139784946237e-05,
      "loss": 0.2188,
      "step": 155
    },
    {
      "epoch": 2.5806451612903225,
      "grad_norm": 2.4255786527564642,
      "learning_rate": 1.7096774193548388e-05,
      "loss": 0.2127,
      "step": 160
    },
    {
      "epoch": 2.661290322580645,
      "grad_norm": 2.3925352807787577,
      "learning_rate": 1.763440860215054e-05,
      "loss": 0.212,
      "step": 165
    },
    {
      "epoch": 2.741935483870968,
      "grad_norm": 2.734010191526564,
      "learning_rate": 1.817204301075269e-05,
      "loss": 0.2057,
      "step": 170
    },
    {
      "epoch": 2.8225806451612905,
      "grad_norm": 2.5495288446706583,
      "learning_rate": 1.870967741935484e-05,
      "loss": 0.2475,
      "step": 175
    },
    {
      "epoch": 2.903225806451613,
      "grad_norm": 3.0351828310456996,
      "learning_rate": 1.924731182795699e-05,
      "loss": 0.253,
      "step": 180
    },
    {
      "epoch": 2.9838709677419355,
      "grad_norm": 2.676416201274647,
      "learning_rate": 1.978494623655914e-05,
      "loss": 0.2596,
      "step": 185
    },
    {
      "epoch": 3.064516129032258,
      "grad_norm": 2.370849336507847,
      "learning_rate": 1.9999841510606067e-05,
      "loss": 0.1421,
      "step": 190
    },
    {
      "epoch": 3.1451612903225805,
      "grad_norm": 2.7377960125859993,
      "learning_rate": 1.999887298250285e-05,
      "loss": 0.1712,
      "step": 195
    },
    {
      "epoch": 3.225806451612903,
      "grad_norm": 2.820149350176967,
      "learning_rate": 1.9997024061134506e-05,
      "loss": 0.1555,
      "step": 200
    },
    {
      "epoch": 3.306451612903226,
      "grad_norm": 2.1128027512370293,
      "learning_rate": 1.999429490929718e-05,
      "loss": 0.152,
      "step": 205
    },
    {
      "epoch": 3.3870967741935485,
      "grad_norm": 2.1099865691307538,
      "learning_rate": 1.999068576729065e-05,
      "loss": 0.1657,
      "step": 210
    },
    {
      "epoch": 3.467741935483871,
      "grad_norm": 2.547789552935568,
      "learning_rate": 1.9986196952897152e-05,
      "loss": 0.1693,
      "step": 215
    },
    {
      "epoch": 3.5483870967741935,
      "grad_norm": 2.198776749855706,
      "learning_rate": 1.9980828861353432e-05,
      "loss": 0.1689,
      "step": 220
    },
    {
      "epoch": 3.629032258064516,
      "grad_norm": 2.3385638015508503,
      "learning_rate": 1.9974581965315923e-05,
      "loss": 0.2257,
      "step": 225
    },
    {
      "epoch": 3.709677419354839,
      "grad_norm": 2.1989820629741934,
      "learning_rate": 1.996745681481913e-05,
      "loss": 0.154,
      "step": 230
    },
    {
      "epoch": 3.790322580645161,
      "grad_norm": 2.814460829579746,
      "learning_rate": 1.9959454037227215e-05,
      "loss": 0.162,
      "step": 235
    },
    {
      "epoch": 3.870967741935484,
      "grad_norm": 2.068359313330162,
      "learning_rate": 1.9950574337178738e-05,
      "loss": 0.177,
      "step": 240
    },
    {
      "epoch": 3.9516129032258065,
      "grad_norm": 2.7059184532156864,
      "learning_rate": 1.994081849652463e-05,
      "loss": 0.2009,
      "step": 245
    },
    {
      "epoch": 4.032258064516129,
      "grad_norm": 1.6009222694764955,
      "learning_rate": 1.9930187374259338e-05,
      "loss": 0.1212,
      "step": 250
    },
    {
      "epoch": 4.112903225806452,
      "grad_norm": 2.0703755307458933,
      "learning_rate": 1.9918681906445205e-05,
      "loss": 0.0961,
      "step": 255
    },
    {
      "epoch": 4.193548387096774,
      "grad_norm": 2.311594788643909,
      "learning_rate": 1.9906303106130038e-05,
      "loss": 0.1417,
      "step": 260
    },
    {
      "epoch": 4.274193548387097,
      "grad_norm": 1.8666211061978775,
      "learning_rate": 1.989305206325792e-05,
      "loss": 0.1131,
      "step": 265
    },
    {
      "epoch": 4.354838709677419,
      "grad_norm": 1.9890820794130148,
      "learning_rate": 1.9878929944573236e-05,
      "loss": 0.1206,
      "step": 270
    },
    {
      "epoch": 4.435483870967742,
      "grad_norm": 1.7249409190839584,
      "learning_rate": 1.9863937993517943e-05,
      "loss": 0.1079,
      "step": 275
    },
    {
      "epoch": 4.516129032258064,
      "grad_norm": 1.9528407771403076,
      "learning_rate": 1.9848077530122083e-05,
      "loss": 0.1014,
      "step": 280
    },
    {
      "epoch": 4.596774193548387,
      "grad_norm": 2.1527829290500535,
      "learning_rate": 1.9831349950887557e-05,
      "loss": 0.1037,
      "step": 285
    },
    {
      "epoch": 4.67741935483871,
      "grad_norm": 1.623780546041927,
      "learning_rate": 1.981375672866517e-05,
      "loss": 0.1175,
      "step": 290
    },
    {
      "epoch": 4.758064516129032,
      "grad_norm": 1.7998585089781174,
      "learning_rate": 1.9795299412524948e-05,
      "loss": 0.1037,
      "step": 295
    },
    {
      "epoch": 4.838709677419355,
      "grad_norm": 2.256914320283658,
      "learning_rate": 1.977597962761972e-05,
      "loss": 0.1359,
      "step": 300
    },
    {
      "epoch": 4.919354838709677,
      "grad_norm": 1.5695323201796525,
      "learning_rate": 1.9755799075042056e-05,
      "loss": 0.1377,
      "step": 305
    },
    {
      "epoch": 5.0,
      "grad_norm": 1.8509458520725823,
      "learning_rate": 1.9734759531674474e-05,
      "loss": 0.1274,
      "step": 310
    },
    {
      "epoch": 5.080645161290323,
      "grad_norm": 2.1385239357831174,
      "learning_rate": 1.971286285003298e-05,
      "loss": 0.066,
      "step": 315
    },
    {
      "epoch": 5.161290322580645,
      "grad_norm": 1.9663061853058708,
      "learning_rate": 1.969011095810397e-05,
      "loss": 0.0793,
      "step": 320
    },
    {
      "epoch": 5.241935483870968,
      "grad_norm": 1.8929709744757282,
      "learning_rate": 1.9666505859174462e-05,
      "loss": 0.0786,
      "step": 325
    },
    {
      "epoch": 5.32258064516129,
      "grad_norm": 1.6063115339944203,
      "learning_rate": 1.9642049631655708e-05,
      "loss": 0.0667,
      "step": 330
    },
    {
      "epoch": 5.403225806451613,
      "grad_norm": 1.6163876431750075,
      "learning_rate": 1.9616744428900186e-05,
      "loss": 0.0776,
      "step": 335
    },
    {
      "epoch": 5.483870967741936,
      "grad_norm": 1.7968306100636957,
      "learning_rate": 1.9590592479012022e-05,
      "loss": 0.0751,
      "step": 340
    },
    {
      "epoch": 5.564516129032258,
      "grad_norm": 1.9209533257498246,
      "learning_rate": 1.9563596084650785e-05,
      "loss": 0.096,
      "step": 345
    },
    {
      "epoch": 5.645161290322581,
      "grad_norm": 1.4487068403130905,
      "learning_rate": 1.9535757622828746e-05,
      "loss": 0.0889,
      "step": 350
    },
    {
      "epoch": 5.725806451612903,
      "grad_norm": 1.3952921694858906,
      "learning_rate": 1.9507079544701583e-05,
      "loss": 0.0732,
      "step": 355
    },
    {
      "epoch": 5.806451612903226,
      "grad_norm": 1.276828494747591,
      "learning_rate": 1.9477564375352564e-05,
      "loss": 0.0813,
      "step": 360
    },
    {
      "epoch": 5.887096774193548,
      "grad_norm": 1.8732979205600613,
      "learning_rate": 1.9447214713570205e-05,
      "loss": 0.0803,
      "step": 365
    },
    {
      "epoch": 5.967741935483871,
      "grad_norm": 1.7001388882225135,
      "learning_rate": 1.941603323161946e-05,
      "loss": 0.0837,
      "step": 370
    },
    {
      "epoch": 6.048387096774194,
      "grad_norm": 1.126311969801114,
      "learning_rate": 1.9384022675006423e-05,
      "loss": 0.0543,
      "step": 375
    },
    {
      "epoch": 6.129032258064516,
      "grad_norm": 1.5736297709731053,
      "learning_rate": 1.9351185862236587e-05,
      "loss": 0.0545,
      "step": 380
    },
    {
      "epoch": 6.209677419354839,
      "grad_norm": 1.700997882047633,
      "learning_rate": 1.9317525684566686e-05,
      "loss": 0.0649,
      "step": 385
    },
    {
      "epoch": 6.290322580645161,
      "grad_norm": 1.2491520103424365,
      "learning_rate": 1.928304510575011e-05,
      "loss": 0.0517,
      "step": 390
    },
    {
      "epoch": 6.370967741935484,
      "grad_norm": 1.9927911356159098,
      "learning_rate": 1.924774716177596e-05,
      "loss": 0.0575,
      "step": 395
    },
    {
      "epoch": 6.451612903225806,
      "grad_norm": 1.405928829545799,
      "learning_rate": 1.9211634960601726e-05,
      "loss": 0.0656,
      "step": 400
    },
    {
      "epoch": 6.532258064516129,
      "grad_norm": 1.5977023675995665,
      "learning_rate": 1.9174711681879627e-05,
      "loss": 0.0599,
      "step": 405
    },
    {
      "epoch": 6.612903225806452,
      "grad_norm": 1.4828352445884125,
      "learning_rate": 1.9136980576676648e-05,
      "loss": 0.0501,
      "step": 410
    },
    {
      "epoch": 6.693548387096774,
      "grad_norm": 1.3445540260231166,
      "learning_rate": 1.9098444967188308e-05,
      "loss": 0.0686,
      "step": 415
    },
    {
      "epoch": 6.774193548387097,
      "grad_norm": 1.3685484393569762,
      "learning_rate": 1.9059108246446107e-05,
      "loss": 0.0549,
      "step": 420
    },
    {
      "epoch": 6.854838709677419,
      "grad_norm": 1.504608696343651,
      "learning_rate": 1.90189738780188e-05,
      "loss": 0.0521,
      "step": 425
    },
    {
      "epoch": 6.935483870967742,
      "grad_norm": 2.3124554784826543,
      "learning_rate": 1.897804539570742e-05,
      "loss": 0.0618,
      "step": 430
    },
    {
      "epoch": 7.016129032258065,
      "grad_norm": 1.0101556069317463,
      "learning_rate": 1.8936326403234125e-05,
      "loss": 0.0594,
      "step": 435
    },
    {
      "epoch": 7.096774193548387,
      "grad_norm": 1.3413145646882874,
      "learning_rate": 1.8893820573924907e-05,
      "loss": 0.0395,
      "step": 440
    },
    {
      "epoch": 7.17741935483871,
      "grad_norm": 1.3557891650154248,
      "learning_rate": 1.8850531650386154e-05,
      "loss": 0.0423,
      "step": 445
    },
    {
      "epoch": 7.258064516129032,
      "grad_norm": 1.3407092707020813,
      "learning_rate": 1.8806463444175108e-05,
      "loss": 0.0496,
      "step": 450
    },
    {
      "epoch": 7.338709677419355,
      "grad_norm": 1.3060068492508308,
      "learning_rate": 1.8761619835464265e-05,
      "loss": 0.0404,
      "step": 455
    },
    {
      "epoch": 7.419354838709677,
      "grad_norm": 1.0175150605179055,
      "learning_rate": 1.8716004772699724e-05,
      "loss": 0.0428,
      "step": 460
    },
    {
      "epoch": 7.5,
      "grad_norm": 1.3413688042908085,
      "learning_rate": 1.8669622272253555e-05,
      "loss": 0.0416,
      "step": 465
    },
    {
      "epoch": 7.580645161290323,
      "grad_norm": 1.153505595164974,
      "learning_rate": 1.862247641807012e-05,
      "loss": 0.0579,
      "step": 470
    },
    {
      "epoch": 7.661290322580645,
      "grad_norm": 1.2354313686927156,
      "learning_rate": 1.857457136130651e-05,
      "loss": 0.0481,
      "step": 475
    },
    {
      "epoch": 7.741935483870968,
      "grad_norm": 1.0931719489898546,
      "learning_rate": 1.8525911319967043e-05,
      "loss": 0.0486,
      "step": 480
    },
    {
      "epoch": 7.82258064516129,
      "grad_norm": 1.5940570451158567,
      "learning_rate": 1.8476500578531843e-05,
      "loss": 0.0451,
      "step": 485
    },
    {
      "epoch": 7.903225806451613,
      "grad_norm": 1.1732082302822824,
      "learning_rate": 1.842634348757964e-05,
      "loss": 0.043,
      "step": 490
    },
    {
      "epoch": 7.983870967741936,
      "grad_norm": 1.4772573693475428,
      "learning_rate": 1.837544446340465e-05,
      "loss": 0.0528,
      "step": 495
    },
    {
      "epoch": 8.064516129032258,
      "grad_norm": 0.8955083646170174,
      "learning_rate": 1.8323807987627784e-05,
      "loss": 0.0334,
      "step": 500
    },
    {
      "epoch": 8.14516129032258,
      "grad_norm": 0.9631422107720344,
      "learning_rate": 1.827143860680199e-05,
      "loss": 0.0291,
      "step": 505
    },
    {
      "epoch": 8.225806451612904,
      "grad_norm": 1.2099608078687119,
      "learning_rate": 1.821834093201196e-05,
      "loss": 0.0337,
      "step": 510
    },
    {
      "epoch": 8.306451612903226,
      "grad_norm": 1.118942530993604,
      "learning_rate": 1.8164519638468127e-05,
      "loss": 0.0311,
      "step": 515
    },
    {
      "epoch": 8.387096774193548,
      "grad_norm": 1.107506450946018,
      "learning_rate": 1.8109979465095014e-05,
      "loss": 0.0285,
      "step": 520
    },
    {
      "epoch": 8.46774193548387,
      "grad_norm": 1.107697878261832,
      "learning_rate": 1.805472521411397e-05,
      "loss": 0.033,
      "step": 525
    },
    {
      "epoch": 8.548387096774194,
      "grad_norm": 1.1679751114840105,
      "learning_rate": 1.799876175062035e-05,
      "loss": 0.0362,
      "step": 530
    },
    {
      "epoch": 8.629032258064516,
      "grad_norm": 0.9679470036614197,
      "learning_rate": 1.7942094002155122e-05,
      "loss": 0.0357,
      "step": 535
    },
    {
      "epoch": 8.709677419354838,
      "grad_norm": 1.1615481659249676,
      "learning_rate": 1.7884726958271033e-05,
      "loss": 0.036,
      "step": 540
    },
    {
      "epoch": 8.790322580645162,
      "grad_norm": 1.3403691682577592,
      "learning_rate": 1.7826665670093258e-05,
      "loss": 0.0332,
      "step": 545
    },
    {
      "epoch": 8.870967741935484,
      "grad_norm": 1.123563696070656,
      "learning_rate": 1.7767915249874666e-05,
      "loss": 0.0375,
      "step": 550
    },
    {
      "epoch": 8.951612903225806,
      "grad_norm": 1.2116386009188533,
      "learning_rate": 1.7708480870545684e-05,
      "loss": 0.044,
      "step": 555
    },
    {
      "epoch": 9.03225806451613,
      "grad_norm": 0.7979141814706628,
      "learning_rate": 1.7648367765258823e-05,
      "loss": 0.0315,
      "step": 560
    },
    {
      "epoch": 9.112903225806452,
      "grad_norm": 2.4300360668926886,
      "learning_rate": 1.758758122692791e-05,
      "loss": 0.0321,
      "step": 565
    },
    {
      "epoch": 9.193548387096774,
      "grad_norm": 1.052541564410572,
      "learning_rate": 1.7526126607762043e-05,
      "loss": 0.0314,
      "step": 570
    },
    {
      "epoch": 9.274193548387096,
      "grad_norm": 0.9024632271562477,
      "learning_rate": 1.746400931879434e-05,
      "loss": 0.0296,
      "step": 575
    },
    {
      "epoch": 9.35483870967742,
      "grad_norm": 1.0547389461651557,
      "learning_rate": 1.7401234829405492e-05,
      "loss": 0.0264,
      "step": 580
    },
    {
      "epoch": 9.435483870967742,
      "grad_norm": 1.1560142054705027,
      "learning_rate": 1.7337808666842196e-05,
      "loss": 0.0256,
      "step": 585
    },
    {
      "epoch": 9.516129032258064,
      "grad_norm": 1.1516296207125127,
      "learning_rate": 1.7273736415730488e-05,
      "loss": 0.0355,
      "step": 590
    },
    {
      "epoch": 9.596774193548388,
      "grad_norm": 0.9268725755353335,
      "learning_rate": 1.7209023717584013e-05,
      "loss": 0.0265,
      "step": 595
    },
    {
      "epoch": 9.67741935483871,
      "grad_norm": 1.0873005455780382,
      "learning_rate": 1.714367627030729e-05,
      "loss": 0.0344,
      "step": 600
    },
    {
      "epoch": 9.758064516129032,
      "grad_norm": 0.9095844635646306,
      "learning_rate": 1.7077699827694038e-05,
      "loss": 0.0362,
      "step": 605
    },
    {
      "epoch": 9.838709677419354,
      "grad_norm": 1.2515206515268222,
      "learning_rate": 1.7011100198920528e-05,
      "loss": 0.0297,
      "step": 610
    },
    {
      "epoch": 9.919354838709678,
      "grad_norm": 1.0327741781324877,
      "learning_rate": 1.6943883248034116e-05,
      "loss": 0.028,
      "step": 615
    },
    {
      "epoch": 10.0,
      "grad_norm": 0.9826424300809284,
      "learning_rate": 1.68760548934369e-05,
      "loss": 0.0327,
      "step": 620
    },
    {
      "epoch": 10.080645161290322,
      "grad_norm": 0.9383755847813475,
      "learning_rate": 1.6807621107364613e-05,
      "loss": 0.0207,
      "step": 625
    },
    {
      "epoch": 10.161290322580646,
      "grad_norm": 0.9662163918019707,
      "learning_rate": 1.6738587915360795e-05,
      "loss": 0.0193,
      "step": 630
    },
    {
      "epoch": 10.241935483870968,
      "grad_norm": 0.8536385882747535,
      "learning_rate": 1.6668961395746203e-05,
      "loss": 0.0184,
      "step": 635
    },
    {
      "epoch": 10.32258064516129,
      "grad_norm": 0.788293552656787,
      "learning_rate": 1.6598747679083657e-05,
      "loss": 0.0303,
      "step": 640
    },
    {
      "epoch": 10.403225806451612,
      "grad_norm": 0.9845898875068094,
      "learning_rate": 1.652795294763824e-05,
      "loss": 0.0216,
      "step": 645
    },
    {
      "epoch": 10.483870967741936,
      "grad_norm": 0.6291325552961899,
      "learning_rate": 1.6456583434832943e-05,
      "loss": 0.021,
      "step": 650
    },
    {
      "epoch": 10.564516129032258,
      "grad_norm": 0.8801054632267955,
      "learning_rate": 1.6384645424699835e-05,
      "loss": 0.0245,
      "step": 655
    },
    {
      "epoch": 10.64516129032258,
      "grad_norm": 0.9368371046853895,
      "learning_rate": 1.631214525132674e-05,
      "loss": 0.0199,
      "step": 660
    },
    {
      "epoch": 10.725806451612904,
      "grad_norm": 1.6268394900105974,
      "learning_rate": 1.6239089298299524e-05,
      "loss": 0.0276,
      "step": 665
    },
    {
      "epoch": 10.806451612903226,
      "grad_norm": 0.999434975658911,
      "learning_rate": 1.616548399814006e-05,
      "loss": 0.0232,
      "step": 670
    },
    {
      "epoch": 10.887096774193548,
      "grad_norm": 0.7559937092929928,
      "learning_rate": 1.60913358317398e-05,
      "loss": 0.0216,
      "step": 675
    },
    {
      "epoch": 10.967741935483872,
      "grad_norm": 0.9110927404449887,
      "learning_rate": 1.6016651327789172e-05,
      "loss": 0.023,
      "step": 680
    },
    {
      "epoch": 11.048387096774194,
      "grad_norm": 0.7820630863655826,
      "learning_rate": 1.594143706220273e-05,
      "loss": 0.0175,
      "step": 685
    },
    {
      "epoch": 11.129032258064516,
      "grad_norm": 0.8615839081674891,
      "learning_rate": 1.5865699657540124e-05,
      "loss": 0.0179,
      "step": 690
    },
    {
      "epoch": 11.209677419354838,
      "grad_norm": 0.7485778670469995,
      "learning_rate": 1.578944578242302e-05,
      "loss": 0.0165,
      "step": 695
    },
    {
      "epoch": 11.290322580645162,
      "grad_norm": 0.5653864836565881,
      "learning_rate": 1.5712682150947926e-05,
      "loss": 0.0138,
      "step": 700
    },
    {
      "epoch": 11.370967741935484,
      "grad_norm": 1.0152034094965365,
      "learning_rate": 1.5635415522094995e-05,
      "loss": 0.0213,
      "step": 705
    },
    {
      "epoch": 11.451612903225806,
      "grad_norm": 0.7290881851961156,
      "learning_rate": 1.555765269913294e-05,
      "loss": 0.0155,
      "step": 710
    },
    {
      "epoch": 11.532258064516128,
      "grad_norm": 0.9439936389696655,
      "learning_rate": 1.5479400529019987e-05,
      "loss": 0.0144,
      "step": 715
    },
    {
      "epoch": 11.612903225806452,
      "grad_norm": 0.7958692168661642,
      "learning_rate": 1.5400665901801007e-05,
      "loss": 0.018,
      "step": 720
    },
    {
      "epoch": 11.693548387096774,
      "grad_norm": 0.7223477960561964,
      "learning_rate": 1.5321455750000864e-05,
      "loss": 0.0135,
      "step": 725
    },
    {
      "epoch": 11.774193548387096,
      "grad_norm": 0.6593842682896188,
      "learning_rate": 1.5241777048013999e-05,
      "loss": 0.0148,
      "step": 730
    },
    {
      "epoch": 11.85483870967742,
      "grad_norm": 0.8453520234181606,
      "learning_rate": 1.5161636811490353e-05,
      "loss": 0.0153,
      "step": 735
    },
    {
      "epoch": 11.935483870967742,
      "grad_norm": 0.8025417174419947,
      "learning_rate": 1.5081042096717629e-05,
      "loss": 0.0164,
      "step": 740
    },
    {
      "epoch": 12.016129032258064,
      "grad_norm": 0.6831031548209219,
      "learning_rate": 1.5000000000000002e-05,
      "loss": 0.0148,
      "step": 745
    },
    {
      "epoch": 12.096774193548388,
      "grad_norm": 0.7608254514246531,
      "learning_rate": 1.491851765703329e-05,
      "loss": 0.012,
      "step": 750
    },
    {
      "epoch": 12.17741935483871,
      "grad_norm": 0.5136759705172103,
      "learning_rate": 1.483660224227667e-05,
      "loss": 0.0068,
      "step": 755
    },
    {
      "epoch": 12.258064516129032,
      "grad_norm": 0.636212407574805,
      "learning_rate": 1.4754260968320953e-05,
      "loss": 0.0127,
      "step": 760
    },
    {
      "epoch": 12.338709677419354,
      "grad_norm": 0.44635685167884065,
      "learning_rate": 1.4671501085253544e-05,
      "loss": 0.0119,
      "step": 765
    },
    {
      "epoch": 12.419354838709678,
      "grad_norm": 0.48363924785074075,
      "learning_rate": 1.4588329880020063e-05,
      "loss": 0.0103,
      "step": 770
    },
    {
      "epoch": 12.5,
      "grad_norm": 0.5439281715798179,
      "learning_rate": 1.4504754675782731e-05,
      "loss": 0.0128,
      "step": 775
    },
    {
      "epoch": 12.580645161290322,
      "grad_norm": 0.5354009938961187,
      "learning_rate": 1.4420782831275593e-05,
      "loss": 0.0116,
      "step": 780
    },
    {
      "epoch": 12.661290322580646,
      "grad_norm": 0.6688889724203744,
      "learning_rate": 1.4336421740156554e-05,
      "loss": 0.0077,
      "step": 785
    },
    {
      "epoch": 12.741935483870968,
      "grad_norm": 0.6812534753778359,
      "learning_rate": 1.4251678830356408e-05,
      "loss": 0.0103,
      "step": 790
    },
    {
      "epoch": 12.82258064516129,
      "grad_norm": 0.6501175433498573,
      "learning_rate": 1.4166561563424787e-05,
      "loss": 0.0112,
      "step": 795
    },
    {
      "epoch": 12.903225806451612,
      "grad_norm": 0.650751022479761,
      "learning_rate": 1.4081077433873193e-05,
      "loss": 0.0113,
      "step": 800
    },
    {
      "epoch": 12.983870967741936,
      "grad_norm": 0.7114897848206225,
      "learning_rate": 1.3995233968515105e-05,
      "loss": 0.0089,
      "step": 805
    },
    {
      "epoch": 13.064516129032258,
      "grad_norm": 0.6357652159202873,
      "learning_rate": 1.3909038725803243e-05,
      "loss": 0.0062,
      "step": 810
    },
    {
      "epoch": 13.14516129032258,
      "grad_norm": 0.5540274340711291,
      "learning_rate": 1.3822499295164073e-05,
      "loss": 0.0079,
      "step": 815
    },
    {
      "epoch": 13.225806451612904,
      "grad_norm": 0.42600296520707526,
      "learning_rate": 1.3735623296329537e-05,
      "loss": 0.0067,
      "step": 820
    },
    {
      "epoch": 13.306451612903226,
      "grad_norm": 0.45535092056177595,
      "learning_rate": 1.3648418378666164e-05,
      "loss": 0.0075,
      "step": 825
    },
    {
      "epoch": 13.387096774193548,
      "grad_norm": 0.5270286975326697,
      "learning_rate": 1.3560892220501532e-05,
      "loss": 0.0057,
      "step": 830
    },
    {
      "epoch": 13.46774193548387,
      "grad_norm": 0.3997583004602451,
      "learning_rate": 1.3473052528448203e-05,
      "loss": 0.005,
      "step": 835
    },
    {
      "epoch": 13.548387096774194,
      "grad_norm": 0.4555926450092979,
      "learning_rate": 1.3384907036725174e-05,
      "loss": 0.0061,
      "step": 840
    },
    {
      "epoch": 13.629032258064516,
      "grad_norm": 0.5495172127346515,
      "learning_rate": 1.3296463506476862e-05,
      "loss": 0.0063,
      "step": 845
    },
    {
      "epoch": 13.709677419354838,
      "grad_norm": 0.25674804312426736,
      "learning_rate": 1.3207729725089757e-05,
      "loss": 0.0056,
      "step": 850
    },
    {
      "epoch": 13.790322580645162,
      "grad_norm": 0.4562073850046896,
      "learning_rate": 1.3118713505506743e-05,
      "loss": 0.005,
      "step": 855
    },
    {
      "epoch": 13.870967741935484,
      "grad_norm": 0.6381309636636849,
      "learning_rate": 1.3029422685539176e-05,
      "loss": 0.0091,
      "step": 860
    },
    {
      "epoch": 13.951612903225806,
      "grad_norm": 0.4675515960872932,
      "learning_rate": 1.2939865127176771e-05,
      "loss": 0.0067,
      "step": 865
    },
    {
      "epoch": 14.03225806451613,
      "grad_norm": 0.48524387355096776,
      "learning_rate": 1.2850048715895354e-05,
      "loss": 0.0053,
      "step": 870
    },
    {
      "epoch": 14.112903225806452,
      "grad_norm": 0.48560164331553235,
      "learning_rate": 1.2759981359962561e-05,
      "loss": 0.0046,
      "step": 875
    },
    {
      "epoch": 14.193548387096774,
      "grad_norm": 0.31174892099656043,
      "learning_rate": 1.2669670989741519e-05,
      "loss": 0.0056,
      "step": 880
    },
    {
      "epoch": 14.274193548387096,
      "grad_norm": 0.36686246972144787,
      "learning_rate": 1.257912555699257e-05,
      "loss": 0.0032,
      "step": 885
    },
    {
      "epoch": 14.35483870967742,
      "grad_norm": 0.4026724810879191,
      "learning_rate": 1.2488353034173146e-05,
      "loss": 0.0043,
      "step": 890
    },
    {
      "epoch": 14.435483870967742,
      "grad_norm": 0.5052593922852059,
      "learning_rate": 1.2397361413735785e-05,
      "loss": 0.0035,
      "step": 895
    },
    {
      "epoch": 14.516129032258064,
      "grad_norm": 0.20112599123413197,
      "learning_rate": 1.2306158707424402e-05,
      "loss": 0.0015,
      "step": 900
    },
    {
      "epoch": 14.596774193548388,
      "grad_norm": 0.7258015458821793,
      "learning_rate": 1.2214752945568875e-05,
      "loss": 0.0041,
      "step": 905
    },
    {
      "epoch": 14.67741935483871,
      "grad_norm": 0.5402118374404563,
      "learning_rate": 1.2123152176377962e-05,
      "loss": 0.0027,
      "step": 910
    },
    {
      "epoch": 14.758064516129032,
      "grad_norm": 0.4880587083553651,
      "learning_rate": 1.2031364465230672e-05,
      "loss": 0.0058,
      "step": 915
    },
    {
      "epoch": 14.838709677419354,
      "grad_norm": 0.3108599971494139,
      "learning_rate": 1.193939789396611e-05,
      "loss": 0.0052,
      "step": 920
    },
    {
      "epoch": 14.919354838709678,
      "grad_norm": 0.40475472593096934,
      "learning_rate": 1.1847260560171895e-05,
      "loss": 0.0044,
      "step": 925
    },
    {
      "epoch": 15.0,
      "grad_norm": 0.43803638841417536,
      "learning_rate": 1.1754960576471138e-05,
      "loss": 0.0041,
      "step": 930
    },
    {
      "epoch": 15.080645161290322,
      "grad_norm": 0.29020838852749026,
      "learning_rate": 1.1662506069808166e-05,
      "loss": 0.002,
      "step": 935
    },
    {
      "epoch": 15.161290322580646,
      "grad_norm": 0.21313654666872428,
      "learning_rate": 1.1569905180732927e-05,
      "loss": 0.0015,
      "step": 940
    },
    {
      "epoch": 15.241935483870968,
      "grad_norm": 0.5669675735813504,
      "learning_rate": 1.1477166062684246e-05,
      "loss": 0.0027,
      "step": 945
    },
    {
      "epoch": 15.32258064516129,
      "grad_norm": 0.15121624329663833,
      "learning_rate": 1.1384296881271892e-05,
      "loss": 0.0019,
      "step": 950
    },
    {
      "epoch": 15.403225806451612,
      "grad_norm": 0.36924431892389764,
      "learning_rate": 1.1291305813557616e-05,
      "loss": 0.0026,
      "step": 955
    },
    {
      "epoch": 15.483870967741936,
      "grad_norm": 0.29277438985055243,
      "learning_rate": 1.1198201047335173e-05,
      "loss": 0.0022,
      "step": 960
    },
    {
      "epoch": 15.564516129032258,
      "grad_norm": 0.10443410826250868,
      "learning_rate": 1.1104990780409382e-05,
      "loss": 0.0025,
      "step": 965
    },
    {
      "epoch": 15.64516129032258,
      "grad_norm": 0.22411880976768567,
      "learning_rate": 1.1011683219874324e-05,
      "loss": 0.0017,
      "step": 970
    },
    {
      "epoch": 15.725806451612904,
      "grad_norm": 0.2002018976808773,
      "learning_rate": 1.0918286581390703e-05,
      "loss": 0.0013,
      "step": 975
    },
    {
      "epoch": 15.806451612903226,
      "grad_norm": 0.5420602502335252,
      "learning_rate": 1.082480908846247e-05,
      "loss": 0.0015,
      "step": 980
    },
    {
      "epoch": 15.887096774193548,
      "grad_norm": 0.1443273425714025,
      "learning_rate": 1.0731258971712762e-05,
      "loss": 0.0025,
      "step": 985
    },
    {
      "epoch": 15.967741935483872,
      "grad_norm": 0.26392860949528996,
      "learning_rate": 1.0637644468159167e-05,
      "loss": 0.0027,
      "step": 990
    },
    {
      "epoch": 16.048387096774192,
      "grad_norm": 0.1460772602237691,
      "learning_rate": 1.0543973820488507e-05,
      "loss": 0.0011,
      "step": 995
    },
    {
      "epoch": 16.129032258064516,
      "grad_norm": 0.2926292759678528,
      "learning_rate": 1.0450255276331029e-05,
      "loss": 0.0012,
      "step": 1000
    },
    {
      "epoch": 16.20967741935484,
      "grad_norm": 0.2734919531646877,
      "learning_rate": 1.0356497087534245e-05,
      "loss": 0.0011,
      "step": 1005
    },
    {
      "epoch": 16.29032258064516,
      "grad_norm": 0.1765395508424354,
      "learning_rate": 1.0262707509436343e-05,
      "loss": 0.0015,
      "step": 1010
    },
    {
      "epoch": 16.370967741935484,
      "grad_norm": 0.3588111466274959,
      "learning_rate": 1.0168894800139311e-05,
      "loss": 0.0029,
      "step": 1015
    },
    {
      "epoch": 16.451612903225808,
      "grad_norm": 0.11715757713182169,
      "learning_rate": 1.0075067219781834e-05,
      "loss": 0.0009,
      "step": 1020
    },
    {
      "epoch": 16.532258064516128,
      "grad_norm": 0.36694923017606906,
      "learning_rate": 9.981233029811964e-06,
      "loss": 0.0007,
      "step": 1025
    },
    {
      "epoch": 16.612903225806452,
      "grad_norm": 0.5028653447738664,
      "learning_rate": 9.887400492259743e-06,
      "loss": 0.0018,
      "step": 1030
    },
    {
      "epoch": 16.693548387096776,
      "grad_norm": 0.033236693859072915,
      "learning_rate": 9.793577869009706e-06,
      "loss": 0.0009,
      "step": 1035
    },
    {
      "epoch": 16.774193548387096,
      "grad_norm": 0.21980053374053776,
      "learning_rate": 9.69977342107345e-06,
      "loss": 0.0016,
      "step": 1040
    },
    {
      "epoch": 16.85483870967742,
      "grad_norm": 0.3668029563033362,
      "learning_rate": 9.605995407862248e-06,
      "loss": 0.0008,
      "step": 1045
    },
    {
      "epoch": 16.93548387096774,
      "grad_norm": 0.285333535627568,
      "learning_rate": 9.512252086459803e-06,
      "loss": 0.0011,
      "step": 1050
    },
    {
      "epoch": 17.016129032258064,
      "grad_norm": 0.8746112024776143,
      "learning_rate": 9.418551710895243e-06,
      "loss": 0.0031,
      "step": 1055
    },
    {
      "epoch": 17.096774193548388,
      "grad_norm": 0.2635238198809026,
      "learning_rate": 9.324902531416348e-06,
      "loss": 0.0005,
      "step": 1060
    },
    {
      "epoch": 17.177419354838708,
      "grad_norm": 0.012658501262013781,
      "learning_rate": 9.231312793763115e-06,
      "loss": 0.0002,
      "step": 1065
    },
    {
      "epoch": 17.258064516129032,
      "grad_norm": 0.19374740237106364,
      "learning_rate": 9.137790738441736e-06,
      "loss": 0.0015,
      "step": 1070
    },
    {
      "epoch": 17.338709677419356,
      "grad_norm": 0.03003764453957054,
      "learning_rate": 9.04434459999902e-06,
      "loss": 0.0002,
      "step": 1075
    },
    {
      "epoch": 17.419354838709676,
      "grad_norm": 0.14865760384746282,
      "learning_rate": 8.95098260629736e-06,
      "loss": 0.0007,
      "step": 1080
    },
    {
      "epoch": 17.5,
      "grad_norm": 0.016975817095253525,
      "learning_rate": 8.857712977790256e-06,
      "loss": 0.0005,
      "step": 1085
    },
    {
      "epoch": 17.580645161290324,
      "grad_norm": 0.00817856589085904,
      "learning_rate": 8.764543926798536e-06,
      "loss": 0.0003,
      "step": 1090
    },
    {
      "epoch": 17.661290322580644,
      "grad_norm": 0.019239876738696586,
      "learning_rate": 8.67148365678724e-06,
      "loss": 0.0003,
      "step": 1095
    },
    {
      "epoch": 17.741935483870968,
      "grad_norm": 0.025903258659712047,
      "learning_rate": 8.57854036164334e-06,
      "loss": 0.0006,
      "step": 1100
    },
    {
      "epoch": 17.822580645161292,
      "grad_norm": 0.11408373313729046,
      "learning_rate": 8.485722224954237e-06,
      "loss": 0.0003,
      "step": 1105
    },
    {
      "epoch": 17.903225806451612,
      "grad_norm": 0.012099095178522009,
      "learning_rate": 8.39303741928724e-06,
      "loss": 0.0002,
      "step": 1110
    },
    {
      "epoch": 17.983870967741936,
      "grad_norm": 0.005552296409129616,
      "learning_rate": 8.300494105469957e-06,
      "loss": 0.0001,
      "step": 1115
    },
    {
      "epoch": 18.06451612903226,
      "grad_norm": 0.004616427536520861,
      "learning_rate": 8.20810043187175e-06,
      "loss": 0.0001,
      "step": 1120
    },
    {
      "epoch": 18.14516129032258,
      "grad_norm": 0.05281261175846121,
      "learning_rate": 8.115864533686259e-06,
      "loss": 0.0021,
      "step": 1125
    },
    {
      "epoch": 18.225806451612904,
      "grad_norm": 0.002757070059738439,
      "learning_rate": 8.023794532215134e-06,
      "loss": 0.0001,
      "step": 1130
    },
    {
      "epoch": 18.306451612903224,
      "grad_norm": 0.008303366626556355,
      "learning_rate": 7.93189853415293e-06,
      "loss": 0.0001,
      "step": 1135
    },
    {
      "epoch": 18.387096774193548,
      "grad_norm": 0.00516447071993667,
      "learning_rate": 7.840184630873337e-06,
      "loss": 0.0001,
      "step": 1140
    },
    {
      "epoch": 18.467741935483872,
      "grad_norm": 0.003628992783848169,
      "learning_rate": 7.748660897716737e-06,
      "loss": 0.0001,
      "step": 1145
    },
    {
      "epoch": 18.548387096774192,
      "grad_norm": 0.00429379090116785,
      "learning_rate": 7.657335393279179e-06,
      "loss": 0.0004,
      "step": 1150
    },
    {
      "epoch": 18.629032258064516,
      "grad_norm": 0.008643267898364707,
      "learning_rate": 7.566216158702813e-06,
      "loss": 0.0001,
      "step": 1155
    },
    {
      "epoch": 18.70967741935484,
      "grad_norm": 0.005551251374713132,
      "learning_rate": 7.475311216967893e-06,
      "loss": 0.0001,
      "step": 1160
    },
    {
      "epoch": 18.79032258064516,
      "grad_norm": 0.014305204262863764,
      "learning_rate": 7.384628572186334e-06,
      "loss": 0.0001,
      "step": 1165
    },
    {
      "epoch": 18.870967741935484,
      "grad_norm": 0.09431284046956707,
      "learning_rate": 7.294176208896988e-06,
      "loss": 0.0002,
      "step": 1170
    },
    {
      "epoch": 18.951612903225808,
      "grad_norm": 0.015681823652013132,
      "learning_rate": 7.203962091362584e-06,
      "loss": 0.0001,
      "step": 1175
    },
    {
      "epoch": 19.032258064516128,
      "grad_norm": 0.002861391926164327,
      "learning_rate": 7.113994162868496e-06,
      "loss": 0.0002,
      "step": 1180
    },
    {
      "epoch": 19.112903225806452,
      "grad_norm": 0.002131347455172179,
      "learning_rate": 7.024280345023331e-06,
      "loss": 0.0001,
      "step": 1185
    },
    {
      "epoch": 19.193548387096776,
      "grad_norm": 0.05586002706856135,
      "learning_rate": 6.934828537061453e-06,
      "loss": 0.0,
      "step": 1190
    },
    {
      "epoch": 19.274193548387096,
      "grad_norm": 0.003294717517805611,
      "learning_rate": 6.845646615147445e-06,
      "loss": 0.0,
      "step": 1195
    },
    {
      "epoch": 19.35483870967742,
      "grad_norm": 0.002670222446490253,
      "learning_rate": 6.756742431682633e-06,
      "loss": 0.0,
      "step": 1200
    },
    {
      "epoch": 19.43548387096774,
      "grad_norm": 0.0017738822116875497,
      "learning_rate": 6.668123814613669e-06,
      "loss": 0.0,
      "step": 1205
    },
    {
      "epoch": 19.516129032258064,
      "grad_norm": 0.0024754597033006137,
      "learning_rate": 6.579798566743314e-06,
      "loss": 0.0,
      "step": 1210
    },
    {
      "epoch": 19.596774193548388,
      "grad_norm": 0.002356334378581499,
      "learning_rate": 6.4917744650433785e-06,
      "loss": 0.0,
      "step": 1215
    },
    {
      "epoch": 19.677419354838708,
      "grad_norm": 0.2337044956153478,
      "learning_rate": 6.404059259969982e-06,
      "loss": 0.0002,
      "step": 1220
    },
    {
      "epoch": 19.758064516129032,
      "grad_norm": 0.0025162021346608904,
      "learning_rate": 6.31666067478113e-06,
      "loss": 0.0,
      "step": 1225
    },
    {
      "epoch": 19.838709677419356,
      "grad_norm": 0.0022045768873613096,
      "learning_rate": 6.229586404856685e-06,
      "loss": 0.0,
      "step": 1230
    },
    {
      "epoch": 19.919354838709676,
      "grad_norm": 0.012071649254350987,
      "learning_rate": 6.142844117020788e-06,
      "loss": 0.0,
      "step": 1235
    },
    {
      "epoch": 20.0,
      "grad_norm": 0.0017997326901498427,
      "learning_rate": 6.056441448866817e-06,
      "loss": 0.0,
      "step": 1240
    },
    {
      "epoch": 20.080645161290324,
      "grad_norm": 0.0017735556664215946,
      "learning_rate": 5.970386008084877e-06,
      "loss": 0.0,
      "step": 1245
    },
    {
      "epoch": 20.161290322580644,
      "grad_norm": 0.002289470916330648,
      "learning_rate": 5.8846853717919785e-06,
      "loss": 0.0,
      "step": 1250
    },
    {
      "epoch": 20.241935483870968,
      "grad_norm": 0.0016837583557860226,
      "learning_rate": 5.799347085864851e-06,
      "loss": 0.0,
      "step": 1255
    },
    {
      "epoch": 20.322580645161292,
      "grad_norm": 0.001997418194694724,
      "learning_rate": 5.714378664275555e-06,
      "loss": 0.0,
      "step": 1260
    },
    {
      "epoch": 20.403225806451612,
      "grad_norm": 0.00143746162194969,
      "learning_rate": 5.629787588429869e-06,
      "loss": 0.0,
      "step": 1265
    },
    {
      "epoch": 20.483870967741936,
      "grad_norm": 0.0014074403025739481,
      "learning_rate": 5.5455813065085565e-06,
      "loss": 0.0,
      "step": 1270
    },
    {
      "epoch": 20.56451612903226,
      "grad_norm": 0.0017208062986359693,
      "learning_rate": 5.461767232811583e-06,
      "loss": 0.0,
      "step": 1275
    },
    {
      "epoch": 20.64516129032258,
      "grad_norm": 0.0015097895515179302,
      "learning_rate": 5.3783527471052445e-06,
      "loss": 0.0,
      "step": 1280
    },
    {
      "epoch": 20.725806451612904,
      "grad_norm": 0.0014681223945534065,
      "learning_rate": 5.295345193972445e-06,
      "loss": 0.0,
      "step": 1285
    },
    {
      "epoch": 20.806451612903224,
      "grad_norm": 0.0015324929212407845,
      "learning_rate": 5.212751882165963e-06,
      "loss": 0.0,
      "step": 1290
    },
    {
      "epoch": 20.887096774193548,
      "grad_norm": 0.0017375035160193378,
      "learning_rate": 5.130580083964942e-06,
      "loss": 0.0,
      "step": 1295
    },
    {
      "epoch": 20.967741935483872,
      "grad_norm": 0.0014209318351752068,
      "learning_rate": 5.048837034534566e-06,
      "loss": 0.0,
      "step": 1300
    },
    {
      "epoch": 21.048387096774192,
      "grad_norm": 0.0018740450897340368,
      "learning_rate": 4.9675299312890234e-06,
      "loss": 0.0,
      "step": 1305
    },
    {
      "epoch": 21.129032258064516,
      "grad_norm": 0.0014027564463943646,
      "learning_rate": 4.886665933257755e-06,
      "loss": 0.0,
      "step": 1310
    },
    {
      "epoch": 21.20967741935484,
      "grad_norm": 0.0014400553594467746,
      "learning_rate": 4.8062521604551245e-06,
      "loss": 0.0,
      "step": 1315
    },
    {
      "epoch": 21.29032258064516,
      "grad_norm": 0.0013379121505017032,
      "learning_rate": 4.7262956932534995e-06,
      "loss": 0.0,
      "step": 1320
    },
    {
      "epoch": 21.370967741935484,
      "grad_norm": 0.001224822178534575,
      "learning_rate": 4.646803571759842e-06,
      "loss": 0.0,
      "step": 1325
    },
    {
      "epoch": 21.451612903225808,
      "grad_norm": 0.0014714368792221118,
      "learning_rate": 4.567782795195816e-06,
      "loss": 0.0,
      "step": 1330
    },
    {
      "epoch": 21.532258064516128,
      "grad_norm": 0.001267162711793656,
      "learning_rate": 4.489240321281507e-06,
      "loss": 0.0,
      "step": 1335
    },
    {
      "epoch": 21.612903225806452,
      "grad_norm": 0.001393308266383759,
      "learning_rate": 4.411183065622832e-06,
      "loss": 0.0,
      "step": 1340
    },
    {
      "epoch": 21.693548387096776,
      "grad_norm": 0.0017841871870553849,
      "learning_rate": 4.333617901102592e-06,
      "loss": 0.0,
      "step": 1345
    },
    {
      "epoch": 21.774193548387096,
      "grad_norm": 0.0013599642390236806,
      "learning_rate": 4.256551657275328e-06,
      "loss": 0.0,
      "step": 1350
    },
    {
      "epoch": 21.85483870967742,
      "grad_norm": 0.0012418118766336196,
      "learning_rate": 4.179991119766008e-06,
      "loss": 0.0,
      "step": 1355
    },
    {
      "epoch": 21.93548387096774,
      "grad_norm": 0.001022682579002671,
      "learning_rate": 4.103943029672517e-06,
      "loss": 0.0,
      "step": 1360
    },
    {
      "epoch": 22.016129032258064,
      "grad_norm": 0.0017336969468103674,
      "learning_rate": 4.028414082972141e-06,
      "loss": 0.0,
      "step": 1365
    },
    {
      "epoch": 22.096774193548388,
      "grad_norm": 0.0011619488754235326,
      "learning_rate": 3.95341092993197e-06,
      "loss": 0.0,
      "step": 1370
    },
    {
      "epoch": 22.177419354838708,
      "grad_norm": 0.001358134552476923,
      "learning_rate": 3.878940174523371e-06,
      "loss": 0.0,
      "step": 1375
    },
    {
      "epoch": 22.258064516129032,
      "grad_norm": 0.0010708083602066329,
      "learning_rate": 3.80500837384049e-06,
      "loss": 0.0,
      "step": 1380
    },
    {
      "epoch": 22.338709677419356,
      "grad_norm": 0.0009492366556284447,
      "learning_rate": 3.7316220375229075e-06,
      "loss": 0.0,
      "step": 1385
    },
    {
      "epoch": 22.419354838709676,
      "grad_norm": 0.0010276309037180683,
      "learning_rate": 3.658787627182495e-06,
      "loss": 0.0,
      "step": 1390
    },
    {
      "epoch": 22.5,
      "grad_norm": 0.0011605959959405084,
      "learning_rate": 3.586511555834434e-06,
      "loss": 0.0,
      "step": 1395
    },
    {
      "epoch": 22.580645161290324,
      "grad_norm": 0.001171193414885611,
      "learning_rate": 3.5148001873325855e-06,
      "loss": 0.0,
      "step": 1400
    },
    {
      "epoch": 22.661290322580644,
      "grad_norm": 0.0009208173258094716,
      "learning_rate": 3.4436598358091577e-06,
      "loss": 0.0,
      "step": 1405
    },
    {
      "epoch": 22.741935483870968,
      "grad_norm": 0.001232800367231175,
      "learning_rate": 3.3730967651187286e-06,
      "loss": 0.0,
      "step": 1410
    },
    {
      "epoch": 22.822580645161292,
      "grad_norm": 0.0012117187143629575,
      "learning_rate": 3.3031171882867387e-06,
      "loss": 0.0,
      "step": 1415
    },
    {
      "epoch": 22.903225806451612,
      "grad_norm": 0.0011761841034011726,
      "learning_rate": 3.233727266962425e-06,
      "loss": 0.0,
      "step": 1420
    },
    {
      "epoch": 22.983870967741936,
      "grad_norm": 0.0009407741858387966,
      "learning_rate": 3.1649331108763157e-06,
      "loss": 0.0,
      "step": 1425
    },
    {
      "epoch": 23.06451612903226,
      "grad_norm": 0.0011025362817417613,
      "learning_rate": 3.0967407773022382e-06,
      "loss": 0.0,
      "step": 1430
    },
    {
      "epoch": 23.14516129032258,
      "grad_norm": 0.0011897362184289289,
      "learning_rate": 3.0291562705240107e-06,
      "loss": 0.0,
      "step": 1435
    },
    {
      "epoch": 23.225806451612904,
      "grad_norm": 0.0009800186473422371,
      "learning_rate": 2.962185541306748e-06,
      "loss": 0.0,
      "step": 1440
    },
    {
      "epoch": 23.306451612903224,
      "grad_norm": 0.001094603521413206,
      "learning_rate": 2.8958344863729294e-06,
      "loss": 0.0,
      "step": 1445
    },
    {
      "epoch": 23.387096774193548,
      "grad_norm": 0.0008637469857871104,
      "learning_rate": 2.830108947883151e-06,
      "loss": 0.0,
      "step": 1450
    },
    {
      "epoch": 23.467741935483872,
      "grad_norm": 0.0009255780472815515,
      "learning_rate": 2.765014712921782e-06,
      "loss": 0.0,
      "step": 1455
    },
    {
      "epoch": 23.548387096774192,
      "grad_norm": 0.0011304099551927653,
      "learning_rate": 2.700557512987375e-06,
      "loss": 0.0,
      "step": 1460
    },
    {
      "epoch": 23.629032258064516,
      "grad_norm": 0.0011542295954931952,
      "learning_rate": 2.6367430234880286e-06,
      "loss": 0.0,
      "step": 1465
    },
    {
      "epoch": 23.70967741935484,
      "grad_norm": 0.0009933082873024395,
      "learning_rate": 2.573576863241669e-06,
      "loss": 0.0,
      "step": 1470
    },
    {
      "epoch": 23.79032258064516,
      "grad_norm": 0.001108485678698975,
      "learning_rate": 2.511064593981324e-06,
      "loss": 0.0,
      "step": 1475
    },
    {
      "epoch": 23.870967741935484,
      "grad_norm": 0.0010882891128127124,
      "learning_rate": 2.4492117198654043e-06,
      "loss": 0.0,
      "step": 1480
    },
    {
      "epoch": 23.951612903225808,
      "grad_norm": 0.001286203801565166,
      "learning_rate": 2.388023686993074e-06,
      "loss": 0.0,
      "step": 1485
    },
    {
      "epoch": 24.032258064516128,
      "grad_norm": 0.0009232469275269296,
      "learning_rate": 2.327505882924724e-06,
      "loss": 0.0,
      "step": 1490
    },
    {
      "epoch": 24.112903225806452,
      "grad_norm": 0.0011312578699948532,
      "learning_rate": 2.2676636362076075e-06,
      "loss": 0.0,
      "step": 1495
    },
    {
      "epoch": 24.193548387096776,
      "grad_norm": 0.0011921940128787147,
      "learning_rate": 2.208502215906655e-06,
      "loss": 0.0,
      "step": 1500
    },
    {
      "epoch": 24.274193548387096,
      "grad_norm": 0.0010399187966345959,
      "learning_rate": 2.150026831140535e-06,
      "loss": 0.0,
      "step": 1505
    },
    {
      "epoch": 24.35483870967742,
      "grad_norm": 0.0008843031344597534,
      "learning_rate": 2.092242630623016e-06,
      "loss": 0.0,
      "step": 1510
    },
    {
      "epoch": 24.43548387096774,
      "grad_norm": 0.0008571839368731308,
      "learning_rate": 2.035154702209602e-06,
      "loss": 0.0,
      "step": 1515
    },
    {
      "epoch": 24.516129032258064,
      "grad_norm": 0.0008882619066774779,
      "learning_rate": 1.9787680724495617e-06,
      "loss": 0.0,
      "step": 1520
    },
    {
      "epoch": 24.596774193548388,
      "grad_norm": 0.001195579373470273,
      "learning_rate": 1.9230877061433505e-06,
      "loss": 0.0,
      "step": 1525
    },
    {
      "epoch": 24.677419354838708,
      "grad_norm": 0.0011135595219953815,
      "learning_rate": 1.8681185059054474e-06,
      "loss": 0.0,
      "step": 1530
    },
    {
      "epoch": 24.758064516129032,
      "grad_norm": 0.0010772677231782838,
      "learning_rate": 1.8138653117326976e-06,
      "loss": 0.0,
      "step": 1535
    },
    {
      "epoch": 24.838709677419356,
      "grad_norm": 0.001084788491202543,
      "learning_rate": 1.7603329005781445e-06,
      "loss": 0.0,
      "step": 1540
    },
    {
      "epoch": 24.919354838709676,
      "grad_norm": 0.0011874752042342596,
      "learning_rate": 1.7075259859304372e-06,
      "loss": 0.0,
      "step": 1545
    },
    {
      "epoch": 25.0,
      "grad_norm": 0.0008254313533727779,
      "learning_rate": 1.6554492173987924e-06,
      "loss": 0.0,
      "step": 1550
    },
    {
      "epoch": 25.080645161290324,
      "grad_norm": 0.001062142981942227,
      "learning_rate": 1.60410718030361e-06,
      "loss": 0.0,
      "step": 1555
    },
    {
      "epoch": 25.161290322580644,
      "grad_norm": 0.00096835703925917,
      "learning_rate": 1.553504395272747e-06,
      "loss": 0.0,
      "step": 1560
    },
    {
      "epoch": 25.241935483870968,
      "grad_norm": 0.0008848134211185202,
      "learning_rate": 1.5036453178434573e-06,
      "loss": 0.0,
      "step": 1565
    },
    {
      "epoch": 25.322580645161292,
      "grad_norm": 0.0008640131693334732,
      "learning_rate": 1.454534338070106e-06,
      "loss": 0.0,
      "step": 1570
    },
    {
      "epoch": 25.403225806451612,
      "grad_norm": 0.001004330343739157,
      "learning_rate": 1.406175780137623e-06,
      "loss": 0.0,
      "step": 1575
    },
    {
      "epoch": 25.483870967741936,
      "grad_norm": 0.0008459293868090642,
      "learning_rate": 1.3585739019807554e-06,
      "loss": 0.0,
      "step": 1580
    },
    {
      "epoch": 25.56451612903226,
      "grad_norm": 0.0011333145138910576,
      "learning_rate": 1.3117328949091634e-06,
      "loss": 0.0,
      "step": 1585
    },
    {
      "epoch": 25.64516129032258,
      "grad_norm": 0.000830798905023494,
      "learning_rate": 1.2656568832383786e-06,
      "loss": 0.0,
      "step": 1590
    },
    {
      "epoch": 25.725806451612904,
      "grad_norm": 0.0010743652192758285,
      "learning_rate": 1.2203499239266692e-06,
      "loss": 0.0,
      "step": 1595
    },
    {
      "epoch": 25.806451612903224,
      "grad_norm": 0.0010310743385034305,
      "learning_rate": 1.1758160062178093e-06,
      "loss": 0.0,
      "step": 1600
    },
    {
      "epoch": 25.887096774193548,
      "grad_norm": 0.0011360480164050648,
      "learning_rate": 1.1320590512898466e-06,
      "loss": 0.0,
      "step": 1605
    },
    {
      "epoch": 25.967741935483872,
      "grad_norm": 0.0008450027015439063,
      "learning_rate": 1.0890829119098335e-06,
      "loss": 0.0,
      "step": 1610
    },
    {
      "epoch": 26.048387096774192,
      "grad_norm": 0.0009978826896828905,
      "learning_rate": 1.0468913720946084e-06,
      "loss": 0.0,
      "step": 1615
    },
    {
      "epoch": 26.129032258064516,
      "grad_norm": 0.0009365752110522866,
      "learning_rate": 1.0054881467775946e-06,
      "loss": 0.0,
      "step": 1620
    },
    {
      "epoch": 26.20967741935484,
      "grad_norm": 0.0011409655223144374,
      "learning_rate": 9.648768814817243e-07,
      "loss": 0.0,
      "step": 1625
    },
    {
      "epoch": 26.29032258064516,
      "grad_norm": 0.0007612209261626826,
      "learning_rate": 9.25061151998441e-07,
      "loss": 0.0,
      "step": 1630
    },
    {
      "epoch": 26.370967741935484,
      "grad_norm": 0.0027386355238377654,
      "learning_rate": 8.860444640728561e-07,
      "loss": 0.0,
      "step": 1635
    },
    {
      "epoch": 26.451612903225808,
      "grad_norm": 0.0009116882726829429,
      "learning_rate": 8.478302530950689e-07,
      "loss": 0.0,
      "step": 1640
    },
    {
      "epoch": 26.532258064516128,
      "grad_norm": 0.001073900560909157,
      "learning_rate": 8.10421883797694e-07,
      "loss": 0.0,
      "step": 1645
    },
    {
      "epoch": 26.612903225806452,
      "grad_norm": 0.0010931791056291048,
      "learning_rate": 7.738226499595813e-07,
      "loss": 0.0,
      "step": 1650
    },
    {
      "epoch": 26.693548387096776,
      "grad_norm": 0.0010614425932874158,
      "learning_rate": 7.380357741158151e-07,
      "loss": 0.0,
      "step": 1655
    },
    {
      "epoch": 26.774193548387096,
      "grad_norm": 0.0007435354809896038,
      "learning_rate": 7.030644072739645e-07,
      "loss": 0.0,
      "step": 1660
    },
    {
      "epoch": 26.85483870967742,
      "grad_norm": 0.0009703100142531286,
      "learning_rate": 6.689116286366492e-07,
      "loss": 0.0,
      "step": 1665
    },
    {
      "epoch": 26.93548387096774,
      "grad_norm": 0.0010228645872483455,
      "learning_rate": 6.355804453304049e-07,
      "loss": 0.0,
      "step": 1670
    },
    {
      "epoch": 27.016129032258064,
      "grad_norm": 0.0007600786028404724,
      "learning_rate": 6.030737921409169e-07,
      "loss": 0.0,
      "step": 1675
    },
    {
      "epoch": 27.096774193548388,
      "grad_norm": 0.001095728378512467,
      "learning_rate": 5.713945312546132e-07,
      "loss": 0.0,
      "step": 1680
    },
    {
      "epoch": 27.177419354838708,
      "grad_norm": 0.0008650915449462145,
      "learning_rate": 5.405454520066489e-07,
      "loss": 0.0,
      "step": 1685
    },
    {
      "epoch": 27.258064516129032,
      "grad_norm": 0.0008937729046222959,
      "learning_rate": 5.105292706353093e-07,
      "loss": 0.0,
      "step": 1690
    },
    {
      "epoch": 27.338709677419356,
      "grad_norm": 0.0008191131154084349,
      "learning_rate": 4.813486300428483e-07,
      "loss": 0.0,
      "step": 1695
    },
    {
      "epoch": 27.419354838709676,
      "grad_norm": 0.0012869493320942784,
      "learning_rate": 4.530060995627783e-07,
      "loss": 0.0,
      "step": 1700
| }, | |
| { | |
| "epoch": 27.5, | |
| "grad_norm": 0.0011064004412895856, | |
| "learning_rate": 4.2550417473364524e-07, | |
| "loss": 0.0, | |
| "step": 1705 | |
| }, | |
| { | |
| "epoch": 27.580645161290324, | |
| "grad_norm": 0.0008556904521743387, | |
| "learning_rate": 3.9884527707929835e-07, | |
| "loss": 0.0, | |
| "step": 1710 | |
| }, | |
| { | |
| "epoch": 27.661290322580644, | |
| "grad_norm": 0.0008391078563168919, | |
| "learning_rate": 3.730317538956785e-07, | |
| "loss": 0.0, | |
| "step": 1715 | |
| }, | |
| { | |
| "epoch": 27.741935483870968, | |
| "grad_norm": 0.0009604512249733587, | |
| "learning_rate": 3.4806587804413703e-07, | |
| "loss": 0.0, | |
| "step": 1720 | |
| }, | |
| { | |
| "epoch": 27.822580645161292, | |
| "grad_norm": 0.0009479344883082548, | |
| "learning_rate": 3.2394984775131257e-07, | |
| "loss": 0.0, | |
| "step": 1725 | |
| }, | |
| { | |
| "epoch": 27.903225806451612, | |
| "grad_norm": 0.0007730560057626099, | |
| "learning_rate": 3.006857864155832e-07, | |
| "loss": 0.0, | |
| "step": 1730 | |
| }, | |
| { | |
| "epoch": 27.983870967741936, | |
| "grad_norm": 0.0009226014418049943, | |
| "learning_rate": 2.7827574242009434e-07, | |
| "loss": 0.0, | |
| "step": 1735 | |
| }, | |
| { | |
| "epoch": 28.06451612903226, | |
| "grad_norm": 0.0011311857953386901, | |
| "learning_rate": 2.567216889524093e-07, | |
| "loss": 0.0, | |
| "step": 1740 | |
| }, | |
| { | |
| "epoch": 28.14516129032258, | |
| "grad_norm": 0.0009477577716149022, | |
| "learning_rate": 2.360255238307674e-07, | |
| "loss": 0.0, | |
| "step": 1745 | |
| }, | |
| { | |
| "epoch": 28.225806451612904, | |
| "grad_norm": 0.0008513879378162911, | |
| "learning_rate": 2.1618906933698059e-07, | |
| "loss": 0.0, | |
| "step": 1750 | |
| }, | |
| { | |
| "epoch": 28.306451612903224, | |
| "grad_norm": 0.0007379392020009556, | |
| "learning_rate": 1.9721407205598563e-07, | |
| "loss": 0.0, | |
| "step": 1755 | |
| }, | |
| { | |
| "epoch": 28.387096774193548, | |
| "grad_norm": 0.000830094984719501, | |
| "learning_rate": 1.7910220272205904e-07, | |
| "loss": 0.0, | |
| "step": 1760 | |
| }, | |
| { | |
| "epoch": 28.467741935483872, | |
| "grad_norm": 0.0009392013431277957, | |
| "learning_rate": 1.6185505607171027e-07, | |
| "loss": 0.0, | |
| "step": 1765 | |
| }, | |
| { | |
| "epoch": 28.548387096774192, | |
| "grad_norm": 0.0008243598311798101, | |
| "learning_rate": 1.4547415070325976e-07, | |
| "loss": 0.0, | |
| "step": 1770 | |
| }, | |
| { | |
| "epoch": 28.629032258064516, | |
| "grad_norm": 0.0011794056471585723, | |
| "learning_rate": 1.2996092894313917e-07, | |
| "loss": 0.0, | |
| "step": 1775 | |
| }, | |
| { | |
| "epoch": 28.70967741935484, | |
| "grad_norm": 0.0009197856830331534, | |
| "learning_rate": 1.1531675671888621e-07, | |
| "loss": 0.0, | |
| "step": 1780 | |
| }, | |
| { | |
| "epoch": 28.79032258064516, | |
| "grad_norm": 0.0009019519703182086, | |
| "learning_rate": 1.0154292343887872e-07, | |
| "loss": 0.0, | |
| "step": 1785 | |
| }, | |
| { | |
| "epoch": 28.870967741935484, | |
| "grad_norm": 0.001210488793948948, | |
| "learning_rate": 8.864064187880439e-08, | |
| "loss": 0.0, | |
| "step": 1790 | |
| }, | |
| { | |
| "epoch": 28.951612903225808, | |
| "grad_norm": 0.0007675357775001478, | |
| "learning_rate": 7.661104807487607e-08, | |
| "loss": 0.0, | |
| "step": 1795 | |
| }, | |
| { | |
| "epoch": 29.032258064516128, | |
| "grad_norm": 0.0011234399016604665, | |
| "learning_rate": 6.5455201223803e-08, | |
| "loss": 0.0, | |
| "step": 1800 | |
| }, | |
| { | |
| "epoch": 29.112903225806452, | |
| "grad_norm": 0.0007188838858821952, | |
| "learning_rate": 5.5174083589532024e-08, | |
| "loss": 0.0, | |
| "step": 1805 | |
| }, | |
| { | |
| "epoch": 29.193548387096776, | |
| "grad_norm": 0.0008649652953028542, | |
| "learning_rate": 4.576860041675679e-08, | |
| "loss": 0.0, | |
| "step": 1810 | |
| }, | |
| { | |
| "epoch": 29.274193548387096, | |
| "grad_norm": 0.0010294955725396954, | |
| "learning_rate": 3.723957985121707e-08, | |
| "loss": 0.0, | |
| "step": 1815 | |
| }, | |
| { | |
| "epoch": 29.35483870967742, | |
| "grad_norm": 0.0008889540042575743, | |
| "learning_rate": 2.9587772866774834e-08, | |
| "loss": 0.0, | |
| "step": 1820 | |
| }, | |
| { | |
| "epoch": 29.43548387096774, | |
| "grad_norm": 0.0009091513471530217, | |
| "learning_rate": 2.2813853199292745e-08, | |
| "loss": 0.0, | |
| "step": 1825 | |
| }, | |
| { | |
| "epoch": 29.516129032258064, | |
| "grad_norm": 0.0009837778433183545, | |
| "learning_rate": 1.6918417287318245e-08, | |
| "loss": 0.0, | |
| "step": 1830 | |
| }, | |
| { | |
| "epoch": 29.596774193548388, | |
| "grad_norm": 0.001034238537723969, | |
| "learning_rate": 1.1901984219558904e-08, | |
| "loss": 0.0, | |
| "step": 1835 | |
| }, | |
| { | |
| "epoch": 29.677419354838708, | |
| "grad_norm": 0.0010133712382654732, | |
| "learning_rate": 7.76499568918454e-09, | |
| "loss": 0.0, | |
| "step": 1840 | |
| }, | |
| { | |
| "epoch": 29.758064516129032, | |
| "grad_norm": 0.0009773095516892435, | |
| "learning_rate": 4.5078159549316605e-09, | |
| "loss": 0.0, | |
| "step": 1845 | |
| }, | |
| { | |
| "epoch": 29.838709677419356, | |
| "grad_norm": 0.0008437093875784567, | |
| "learning_rate": 2.130731809031339e-09, | |
| "loss": 0.0, | |
| "step": 1850 | |
| }, | |
| { | |
| "epoch": 29.919354838709676, | |
| "grad_norm": 0.0010424147363070917, | |
| "learning_rate": 6.339525519594159e-10, | |
| "loss": 0.0, | |
| "step": 1855 | |
| }, | |
| { | |
| "epoch": 30.0, | |
| "grad_norm": 0.0006609763091981757, | |
| "learning_rate": 1.7609974007903875e-11, | |
| "loss": 0.0, | |
| "step": 1860 | |
| }, | |
| { | |
| "epoch": 30.0, | |
| "step": 1860, | |
| "total_flos": 35446477946880.0, | |
| "train_loss": 0.053323694472176913, | |
| "train_runtime": 23282.7104, | |
| "train_samples_per_second": 2.547, | |
| "train_steps_per_second": 0.08 | |
| } | |
| ], | |
| "logging_steps": 5, | |
| "max_steps": 1860, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 30, | |
| "save_steps": 310, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 35446477946880.0, | |
| "train_batch_size": 2, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
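
The state above is the standard output the Hugging Face Trainer writes alongside checkpoints. A minimal sketch for consuming it is shown below; the file name `trainer_state.json` and its location are assumptions (the Trainer normally writes it under that name inside each checkpoint and output directory), and the field names used are only the ones that appear in the log above.

```python
# Minimal sketch: load this trainer_state.json and summarize the logged
# training history. Uses only the standard library; adjust the path to
# wherever the file actually lives (hypothetical path below).
import json

with open("trainer_state.json") as f:
    state = json.load(f)

history = state["log_history"]

# Per-step records carry "loss" and "learning_rate"; the final summary
# record instead carries aggregate fields such as "train_loss".
steps = [r["step"] for r in history if "loss" in r]
losses = [r["loss"] for r in history if "loss" in r]
lrs = [r["learning_rate"] for r in history if "learning_rate" in r]
summary = history[-1]

print(f"logged points      : {len(steps)}")
print(f"first/last step    : {steps[0]} -> {steps[-1]}")
print(f"first/last loss    : {losses[0]:.4f} -> {losses[-1]:.4f}")
print(f"peak learning rate : {max(lrs):.3e}")
print(f"mean train loss    : {summary.get('train_loss')}")
print(f"train runtime (s)  : {summary.get('train_runtime')}")
```

Run against this file, the sketch would report 372 logged points (1860 steps logged every 5 steps), the loss falling from roughly 0.77 to 0.0, and the mean training loss of about 0.0533 recorded in the final summary entry.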