{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 30.0,
  "eval_steps": 500,
  "global_step": 1860,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.08064516129032258,
      "grad_norm": 7.9788904562924206,
      "learning_rate": 2.1505376344086024e-07,
      "loss": 0.7746,
      "step": 5
    },
    {
      "epoch": 0.16129032258064516,
      "grad_norm": 9.521156430604261,
      "learning_rate": 4.838709677419355e-07,
      "loss": 0.7594,
      "step": 10
    },
    {
      "epoch": 0.24193548387096775,
      "grad_norm": 6.990008524163482,
      "learning_rate": 7.526881720430108e-07,
      "loss": 0.7177,
      "step": 15
    },
    {
      "epoch": 0.3225806451612903,
      "grad_norm": 5.245105441222518,
      "learning_rate": 1.021505376344086e-06,
      "loss": 0.622,
      "step": 20
    },
    {
      "epoch": 0.4032258064516129,
      "grad_norm": 6.050490163797067,
      "learning_rate": 1.2903225806451614e-06,
      "loss": 0.5555,
      "step": 25
    },
    {
      "epoch": 0.4838709677419355,
      "grad_norm": 4.382694958544679,
      "learning_rate": 1.5591397849462367e-06,
      "loss": 0.5051,
      "step": 30
    },
    {
      "epoch": 0.5645161290322581,
      "grad_norm": 3.5106557630691793,
      "learning_rate": 1.827956989247312e-06,
      "loss": 0.4399,
      "step": 35
    },
    {
      "epoch": 0.6451612903225806,
      "grad_norm": 3.6811492921662814,
      "learning_rate": 2.096774193548387e-06,
      "loss": 0.4063,
      "step": 40
    },
    {
      "epoch": 0.7258064516129032,
      "grad_norm": 3.441163884743535,
      "learning_rate": 2.3655913978494625e-06,
      "loss": 0.3723,
      "step": 45
    },
    {
      "epoch": 0.8064516129032258,
      "grad_norm": 3.447581913907039,
      "learning_rate": 2.634408602150538e-06,
      "loss": 0.4851,
      "step": 50
    },
    {
      "epoch": 0.8870967741935484,
      "grad_norm": 3.675136115986037,
      "learning_rate": 2.903225806451613e-06,
      "loss": 0.4024,
      "step": 55
    },
    {
      "epoch": 0.967741935483871,
      "grad_norm": 3.0780394937329274,
      "learning_rate": 3.1720430107526885e-06,
      "loss": 0.413,
      "step": 60
    },
    {
      "epoch": 1.0483870967741935,
      "grad_norm": 3.2116900427519384,
      "learning_rate": 3.440860215053764e-06,
      "loss": 0.3071,
      "step": 65
    },
    {
      "epoch": 1.129032258064516,
      "grad_norm": 4.009148207400134,
      "learning_rate": 3.7096774193548392e-06,
      "loss": 0.3085,
      "step": 70
    },
    {
      "epoch": 1.2096774193548387,
      "grad_norm": 2.8388506596193777,
      "learning_rate": 3.978494623655914e-06,
      "loss": 0.3009,
      "step": 75
    },
    {
      "epoch": 1.2903225806451613,
      "grad_norm": 3.328140626191503,
      "learning_rate": 4.2473118279569895e-06,
      "loss": 0.3181,
      "step": 80
    },
    {
      "epoch": 1.370967741935484,
      "grad_norm": 4.027543468539502,
      "learning_rate": 4.516129032258065e-06,
      "loss": 0.2963,
      "step": 85
    },
    {
      "epoch": 1.4516129032258065,
      "grad_norm": 3.2636669590920286,
      "learning_rate": 4.78494623655914e-06,
      "loss": 0.2792,
      "step": 90
    },
    {
      "epoch": 1.532258064516129,
      "grad_norm": 3.5992606337843487,
      "learning_rate": 5.0537634408602155e-06,
      "loss": 0.3347,
      "step": 95
    },
    {
      "epoch": 1.6129032258064515,
      "grad_norm": 3.5694108633818504,
      "learning_rate": 5.322580645161291e-06,
      "loss": 0.3348,
      "step": 100
    },
    {
      "epoch": 1.6935483870967742,
      "grad_norm": 3.660221218865697,
      "learning_rate": 5.591397849462365e-06,
      "loss": 0.3272,
      "step": 105
    },
    {
      "epoch": 1.7741935483870968,
      "grad_norm": 3.4279728194623056,
      "learning_rate": 5.8602150537634415e-06,
      "loss": 0.2729,
      "step": 110
    },
    {
      "epoch": 1.8548387096774195,
      "grad_norm": 3.3534086258710825,
      "learning_rate": 6.129032258064517e-06,
      "loss": 0.2944,
      "step": 115
    },
    {
      "epoch": 1.935483870967742,
      "grad_norm": 3.040737011508716,
      "learning_rate": 6.397849462365592e-06,
      "loss": 0.3298,
      "step": 120
    },
    {
      "epoch": 2.0161290322580645,
      "grad_norm": 3.0276013694676283,
      "learning_rate": 6.666666666666667e-06,
      "loss": 0.2396,
      "step": 125
    },
    {
      "epoch": 2.096774193548387,
      "grad_norm": 4.83974864448008,
      "learning_rate": 6.935483870967743e-06,
      "loss": 0.1704,
      "step": 130
    },
    {
      "epoch": 2.1774193548387095,
      "grad_norm": 3.078350989376756,
      "learning_rate": 7.204301075268818e-06,
      "loss": 0.1689,
      "step": 135
    },
    {
      "epoch": 2.258064516129032,
      "grad_norm": 3.4510008163142754,
      "learning_rate": 7.4731182795698935e-06,
      "loss": 0.1483,
      "step": 140
    },
    {
      "epoch": 2.338709677419355,
      "grad_norm": 3.3742727028380353,
      "learning_rate": 7.741935483870968e-06,
      "loss": 0.1847,
      "step": 145
    },
    {
      "epoch": 2.4193548387096775,
      "grad_norm": 2.9190658488467767,
      "learning_rate": 8.010752688172043e-06,
      "loss": 0.181,
      "step": 150
    },
    {
      "epoch": 2.5,
      "grad_norm": 3.2768686821304227,
      "learning_rate": 8.279569892473119e-06,
      "loss": 0.201,
      "step": 155
    },
    {
      "epoch": 2.5806451612903225,
      "grad_norm": 2.8067701037574753,
      "learning_rate": 8.548387096774194e-06,
      "loss": 0.1922,
      "step": 160
    },
    {
      "epoch": 2.661290322580645,
      "grad_norm": 2.951622834663203,
      "learning_rate": 8.81720430107527e-06,
      "loss": 0.2015,
      "step": 165
    },
    {
      "epoch": 2.741935483870968,
      "grad_norm": 3.03623148237881,
      "learning_rate": 9.086021505376345e-06,
      "loss": 0.1822,
      "step": 170
    },
    {
      "epoch": 2.8225806451612905,
      "grad_norm": 3.0772573560886514,
      "learning_rate": 9.35483870967742e-06,
      "loss": 0.227,
      "step": 175
    },
    {
      "epoch": 2.903225806451613,
      "grad_norm": 2.9654885185259827,
      "learning_rate": 9.623655913978495e-06,
      "loss": 0.2212,
      "step": 180
    },
    {
      "epoch": 2.9838709677419355,
      "grad_norm": 3.067899631927538,
      "learning_rate": 9.89247311827957e-06,
      "loss": 0.2277,
      "step": 185
    },
    {
      "epoch": 3.064516129032258,
      "grad_norm": 2.7807126927253187,
      "learning_rate": 9.999920755303033e-06,
      "loss": 0.1288,
      "step": 190
    },
    {
      "epoch": 3.1451612903225805,
      "grad_norm": 3.4305790367630777,
      "learning_rate": 9.999436491251425e-06,
      "loss": 0.1393,
      "step": 195
    },
    {
      "epoch": 3.225806451612903,
      "grad_norm": 2.7900737962648043,
      "learning_rate": 9.998512030567253e-06,
      "loss": 0.1298,
      "step": 200
    },
    {
      "epoch": 3.306451612903226,
      "grad_norm": 2.349901750902021,
      "learning_rate": 9.99714745464859e-06,
      "loss": 0.1306,
      "step": 205
    },
    {
      "epoch": 3.3870967741935485,
      "grad_norm": 2.77370394045052,
      "learning_rate": 9.995342883645325e-06,
      "loss": 0.1521,
      "step": 210
    },
    {
      "epoch": 3.467741935483871,
      "grad_norm": 3.0084835646623094,
      "learning_rate": 9.993098476448576e-06,
      "loss": 0.1314,
      "step": 215
    },
    {
      "epoch": 3.5483870967741935,
      "grad_norm": 3.093621590181524,
      "learning_rate": 9.990414430676716e-06,
      "loss": 0.1545,
      "step": 220
    },
    {
      "epoch": 3.629032258064516,
      "grad_norm": 3.1528581559810305,
      "learning_rate": 9.987290982657961e-06,
      "loss": 0.1929,
      "step": 225
    },
    {
      "epoch": 3.709677419354839,
      "grad_norm": 2.8445198693562816,
      "learning_rate": 9.983728407409565e-06,
      "loss": 0.1305,
      "step": 230
    },
    {
      "epoch": 3.790322580645161,
      "grad_norm": 2.960192414152783,
      "learning_rate": 9.979727018613607e-06,
      "loss": 0.1425,
      "step": 235
    },
    {
      "epoch": 3.870967741935484,
      "grad_norm": 2.6571319169632703,
      "learning_rate": 9.975287168589369e-06,
      "loss": 0.1491,
      "step": 240
    },
    {
      "epoch": 3.9516129032258065,
      "grad_norm": 3.3868215591855253,
      "learning_rate": 9.970409248262314e-06,
      "loss": 0.1688,
      "step": 245
    },
    {
      "epoch": 4.032258064516129,
      "grad_norm": 2.091135299801856,
      "learning_rate": 9.965093687129669e-06,
      "loss": 0.1028,
      "step": 250
    },
    {
      "epoch": 4.112903225806452,
      "grad_norm": 3.802878910271049,
      "learning_rate": 9.959340953222602e-06,
      "loss": 0.0842,
      "step": 255
    },
    {
      "epoch": 4.193548387096774,
      "grad_norm": 3.7199837682735684,
      "learning_rate": 9.953151553065019e-06,
      "loss": 0.088,
      "step": 260
    },
    {
      "epoch": 4.274193548387097,
      "grad_norm": 2.2960394303716116,
      "learning_rate": 9.94652603162896e-06,
      "loss": 0.0884,
      "step": 265
    },
    {
      "epoch": 4.354838709677419,
      "grad_norm": 2.465062363580101,
      "learning_rate": 9.939464972286618e-06,
      "loss": 0.1006,
      "step": 270
    },
    {
      "epoch": 4.435483870967742,
      "grad_norm": 2.0327702748628553,
      "learning_rate": 9.931968996758972e-06,
      "loss": 0.0899,
      "step": 275
    },
    {
      "epoch": 4.516129032258064,
      "grad_norm": 2.305360963756021,
      "learning_rate": 9.924038765061042e-06,
      "loss": 0.079,
      "step": 280
    },
    {
      "epoch": 4.596774193548387,
      "grad_norm": 2.4662415593135725,
      "learning_rate": 9.915674975443779e-06,
      "loss": 0.0875,
      "step": 285
    },
    {
      "epoch": 4.67741935483871,
      "grad_norm": 2.555687280861228,
      "learning_rate": 9.906878364332586e-06,
      "loss": 0.0953,
      "step": 290
    },
    {
      "epoch": 4.758064516129032,
      "grad_norm": 2.3818809469476476,
      "learning_rate": 9.897649706262474e-06,
      "loss": 0.0855,
      "step": 295
    },
    {
      "epoch": 4.838709677419355,
      "grad_norm": 2.30924468453585,
      "learning_rate": 9.88798981380986e-06,
      "loss": 0.1138,
      "step": 300
    },
    {
      "epoch": 4.919354838709677,
      "grad_norm": 2.047820547358064,
      "learning_rate": 9.877899537521028e-06,
      "loss": 0.1256,
      "step": 305
    },
    {
      "epoch": 5.0,
      "grad_norm": 2.7534037559978417,
      "learning_rate": 9.867379765837237e-06,
      "loss": 0.1109,
      "step": 310
    },
    {
      "epoch": 5.080645161290323,
      "grad_norm": 2.1913087837549052,
      "learning_rate": 9.85643142501649e-06,
      "loss": 0.0544,
      "step": 315
    },
    {
      "epoch": 5.161290322580645,
      "grad_norm": 2.293004207498763,
      "learning_rate": 9.845055479051986e-06,
      "loss": 0.0678,
      "step": 320
    },
    {
      "epoch": 5.241935483870968,
      "grad_norm": 1.9502100689755515,
      "learning_rate": 9.833252929587231e-06,
      "loss": 0.0596,
      "step": 325
    },
    {
      "epoch": 5.32258064516129,
      "grad_norm": 2.377741796012424,
      "learning_rate": 9.821024815827854e-06,
      "loss": 0.0536,
      "step": 330
    },
    {
      "epoch": 5.403225806451613,
      "grad_norm": 1.8788141547729034,
      "learning_rate": 9.808372214450093e-06,
      "loss": 0.066,
      "step": 335
    },
    {
      "epoch": 5.483870967741936,
      "grad_norm": 2.18752083985909,
      "learning_rate": 9.795296239506011e-06,
      "loss": 0.064,
      "step": 340
    },
    {
      "epoch": 5.564516129032258,
      "grad_norm": 2.1214320464857432,
      "learning_rate": 9.781798042325392e-06,
      "loss": 0.0718,
      "step": 345
    },
    {
      "epoch": 5.645161290322581,
      "grad_norm": 2.3419858522206423,
      "learning_rate": 9.767878811414373e-06,
      "loss": 0.0751,
      "step": 350
    },
    {
      "epoch": 5.725806451612903,
      "grad_norm": 2.0692559600108846,
      "learning_rate": 9.753539772350792e-06,
      "loss": 0.0559,
      "step": 355
    },
    {
      "epoch": 5.806451612903226,
      "grad_norm": 1.7149217664306626,
      "learning_rate": 9.738782187676282e-06,
      "loss": 0.0615,
      "step": 360
    },
    {
      "epoch": 5.887096774193548,
      "grad_norm": 2.420112879389103,
      "learning_rate": 9.723607356785103e-06,
      "loss": 0.0651,
      "step": 365
    },
    {
      "epoch": 5.967741935483871,
      "grad_norm": 2.0342725263869488,
      "learning_rate": 9.70801661580973e-06,
      "loss": 0.0657,
      "step": 370
    },
    {
      "epoch": 6.048387096774194,
      "grad_norm": 1.5676113830493696,
      "learning_rate": 9.692011337503212e-06,
      "loss": 0.048,
      "step": 375
    },
    {
      "epoch": 6.129032258064516,
      "grad_norm": 1.4853313640607673,
      "learning_rate": 9.675592931118293e-06,
      "loss": 0.0395,
      "step": 380
    },
    {
      "epoch": 6.209677419354839,
      "grad_norm": 2.076517691069182,
      "learning_rate": 9.658762842283343e-06,
      "loss": 0.0478,
      "step": 385
    },
    {
      "epoch": 6.290322580645161,
      "grad_norm": 1.6175069613510986,
      "learning_rate": 9.641522552875055e-06,
      "loss": 0.0402,
      "step": 390
    },
    {
      "epoch": 6.370967741935484,
      "grad_norm": 2.536372096562857,
      "learning_rate": 9.62387358088798e-06,
      "loss": 0.0512,
      "step": 395
    },
    {
      "epoch": 6.451612903225806,
      "grad_norm": 1.8118088837546005,
      "learning_rate": 9.605817480300863e-06,
      "loss": 0.0534,
      "step": 400
    },
    {
      "epoch": 6.532258064516129,
      "grad_norm": 2.286351385709345,
      "learning_rate": 9.587355840939813e-06,
      "loss": 0.0478,
      "step": 405
    },
    {
      "epoch": 6.612903225806452,
      "grad_norm": 1.8901897009499684,
      "learning_rate": 9.568490288338324e-06,
      "loss": 0.0425,
      "step": 410
    },
    {
      "epoch": 6.693548387096774,
      "grad_norm": 1.5196329706278056,
      "learning_rate": 9.549222483594154e-06,
      "loss": 0.0447,
      "step": 415
    },
    {
      "epoch": 6.774193548387097,
      "grad_norm": 1.4880512934239636,
      "learning_rate": 9.529554123223053e-06,
      "loss": 0.0433,
      "step": 420
    },
    {
      "epoch": 6.854838709677419,
      "grad_norm": 1.711263234463656,
      "learning_rate": 9.5094869390094e-06,
      "loss": 0.0452,
      "step": 425
    },
    {
      "epoch": 6.935483870967742,
      "grad_norm": 1.8242713772702877,
      "learning_rate": 9.48902269785371e-06,
      "loss": 0.0417,
      "step": 430
    },
    {
      "epoch": 7.016129032258065,
      "grad_norm": 1.408406163892193,
      "learning_rate": 9.468163201617063e-06,
      "loss": 0.05,
      "step": 435
    },
    {
      "epoch": 7.096774193548387,
      "grad_norm": 1.0961989080978434,
      "learning_rate": 9.446910286962453e-06,
      "loss": 0.0249,
      "step": 440
    },
    {
      "epoch": 7.17741935483871,
      "grad_norm": 1.7189796665774795,
      "learning_rate": 9.425265825193077e-06,
      "loss": 0.0317,
      "step": 445
    },
    {
      "epoch": 7.258064516129032,
      "grad_norm": 1.9348300573137698,
      "learning_rate": 9.403231722087554e-06,
      "loss": 0.0347,
      "step": 450
    },
    {
      "epoch": 7.338709677419355,
      "grad_norm": 1.7296791252489212,
      "learning_rate": 9.380809917732132e-06,
      "loss": 0.031,
      "step": 455
    },
    {
      "epoch": 7.419354838709677,
      "grad_norm": 1.301188666000411,
      "learning_rate": 9.358002386349862e-06,
      "loss": 0.0337,
      "step": 460
    },
    {
      "epoch": 7.5,
      "grad_norm": 1.6849307326292067,
      "learning_rate": 9.334811136126778e-06,
      "loss": 0.034,
      "step": 465
    },
    {
      "epoch": 7.580645161290323,
      "grad_norm": 1.8341284468308772,
      "learning_rate": 9.31123820903506e-06,
      "loss": 0.0425,
      "step": 470
    },
    {
      "epoch": 7.661290322580645,
      "grad_norm": 1.6185716325286443,
      "learning_rate": 9.287285680653254e-06,
      "loss": 0.0334,
      "step": 475
    },
    {
      "epoch": 7.741935483870968,
      "grad_norm": 1.4082036883187536,
      "learning_rate": 9.262955659983522e-06,
      "loss": 0.0365,
      "step": 480
    },
    {
      "epoch": 7.82258064516129,
      "grad_norm": 1.9810172286312782,
      "learning_rate": 9.238250289265921e-06,
      "loss": 0.031,
      "step": 485
    },
    {
      "epoch": 7.903225806451613,
      "grad_norm": 1.8318457886483148,
      "learning_rate": 9.21317174378982e-06,
      "loss": 0.0354,
      "step": 490
    },
    {
      "epoch": 7.983870967741936,
      "grad_norm": 1.674905845501415,
      "learning_rate": 9.187722231702326e-06,
      "loss": 0.0409,
      "step": 495
    },
    {
      "epoch": 8.064516129032258,
      "grad_norm": 1.206360757615427,
      "learning_rate": 9.161903993813892e-06,
      "loss": 0.0276,
      "step": 500
    },
    {
      "epoch": 8.14516129032258,
      "grad_norm": 0.9995201803814792,
      "learning_rate": 9.135719303400995e-06,
      "loss": 0.019,
      "step": 505
    },
    {
      "epoch": 8.225806451612904,
      "grad_norm": 1.77138842254183,
      "learning_rate": 9.10917046600598e-06,
      "loss": 0.0235,
      "step": 510
    },
    {
      "epoch": 8.306451612903226,
      "grad_norm": 1.772318685251266,
      "learning_rate": 9.082259819234063e-06,
      "loss": 0.0262,
      "step": 515
    },
    {
      "epoch": 8.387096774193548,
      "grad_norm": 1.3694563307357386,
      "learning_rate": 9.054989732547507e-06,
      "loss": 0.0274,
      "step": 520
    },
    {
      "epoch": 8.46774193548387,
      "grad_norm": 1.5818168003304018,
      "learning_rate": 9.027362607056986e-06,
      "loss": 0.0281,
      "step": 525
    },
    {
      "epoch": 8.548387096774194,
      "grad_norm": 1.477790915380952,
      "learning_rate": 8.999380875310176e-06,
      "loss": 0.0274,
      "step": 530
    },
    {
      "epoch": 8.629032258064516,
      "grad_norm": 1.463450411153143,
      "learning_rate": 8.971047001077561e-06,
      "loss": 0.0305,
      "step": 535
    },
    {
      "epoch": 8.709677419354838,
      "grad_norm": 1.4651802172060195,
      "learning_rate": 8.942363479135516e-06,
      "loss": 0.0293,
      "step": 540
    },
    {
      "epoch": 8.790322580645162,
      "grad_norm": 1.4324189291721474,
      "learning_rate": 8.913332835046629e-06,
      "loss": 0.0239,
      "step": 545
    },
    {
      "epoch": 8.870967741935484,
      "grad_norm": 1.5257751575787353,
      "learning_rate": 8.883957624937333e-06,
      "loss": 0.0261,
      "step": 550
    },
    {
      "epoch": 8.951612903225806,
      "grad_norm": 1.6993511552941885,
      "learning_rate": 8.854240435272842e-06,
      "loss": 0.0484,
      "step": 555
    },
    {
      "epoch": 9.03225806451613,
      "grad_norm": 1.159130365005857,
      "learning_rate": 8.824183882629411e-06,
      "loss": 0.027,
      "step": 560
    },
    {
      "epoch": 9.112903225806452,
      "grad_norm": 1.2166909931601617,
      "learning_rate": 8.793790613463956e-06,
      "loss": 0.0183,
      "step": 565
    },
    {
      "epoch": 9.193548387096774,
      "grad_norm": 1.160324774431725,
      "learning_rate": 8.763063303881021e-06,
      "loss": 0.0188,
      "step": 570
    },
    {
      "epoch": 9.274193548387096,
      "grad_norm": 1.3440474061044185,
      "learning_rate": 8.73200465939717e-06,
      "loss": 0.0224,
      "step": 575
    },
    {
      "epoch": 9.35483870967742,
      "grad_norm": 1.1812237336382116,
      "learning_rate": 8.700617414702746e-06,
      "loss": 0.0222,
      "step": 580
    },
    {
      "epoch": 9.435483870967742,
      "grad_norm": 1.2040220371499268,
      "learning_rate": 8.668904333421098e-06,
      "loss": 0.0213,
      "step": 585
    },
    {
      "epoch": 9.516129032258064,
      "grad_norm": 1.1242316338416976,
      "learning_rate": 8.636868207865244e-06,
      "loss": 0.026,
      "step": 590
    },
    {
      "epoch": 9.596774193548388,
      "grad_norm": 1.2056824125041377,
      "learning_rate": 8.604511858792006e-06,
      "loss": 0.0212,
      "step": 595
    },
    {
      "epoch": 9.67741935483871,
      "grad_norm": 1.2737111889090427,
      "learning_rate": 8.571838135153645e-06,
      "loss": 0.0221,
      "step": 600
    },
    {
      "epoch": 9.758064516129032,
      "grad_norm": 1.2658665183240156,
      "learning_rate": 8.538849913847019e-06,
      "loss": 0.0199,
      "step": 605
    },
    {
      "epoch": 9.838709677419354,
      "grad_norm": 1.444864038120413,
      "learning_rate": 8.505550099460264e-06,
      "loss": 0.0242,
      "step": 610
    },
    {
      "epoch": 9.919354838709678,
      "grad_norm": 1.464062720341168,
      "learning_rate": 8.471941624017058e-06,
      "loss": 0.0241,
      "step": 615
    },
    {
      "epoch": 10.0,
      "grad_norm": 1.159759434728226,
      "learning_rate": 8.43802744671845e-06,
      "loss": 0.0262,
      "step": 620
    },
    {
      "epoch": 10.080645161290322,
      "grad_norm": 1.2344076422984258,
      "learning_rate": 8.403810553682307e-06,
      "loss": 0.018,
      "step": 625
    },
    {
      "epoch": 10.161290322580646,
      "grad_norm": 1.1047319831516356,
      "learning_rate": 8.369293957680397e-06,
      "loss": 0.0156,
      "step": 630
    },
    {
      "epoch": 10.241935483870968,
      "grad_norm": 0.9725174239581482,
      "learning_rate": 8.334480697873101e-06,
      "loss": 0.014,
      "step": 635
    },
    {
      "epoch": 10.32258064516129,
      "grad_norm": 1.0873207313793845,
      "learning_rate": 8.299373839541829e-06,
      "loss": 0.0184,
      "step": 640
    },
    {
      "epoch": 10.403225806451612,
      "grad_norm": 1.1713325775584265,
      "learning_rate": 8.26397647381912e-06,
      "loss": 0.0191,
      "step": 645
    },
    {
      "epoch": 10.483870967741936,
      "grad_norm": 1.1289992391197494,
      "learning_rate": 8.228291717416472e-06,
      "loss": 0.0192,
      "step": 650
    },
    {
      "epoch": 10.564516129032258,
      "grad_norm": 1.4540729880318348,
      "learning_rate": 8.192322712349917e-06,
      "loss": 0.0209,
      "step": 655
    },
    {
      "epoch": 10.64516129032258,
      "grad_norm": 1.2131150635320254,
      "learning_rate": 8.15607262566337e-06,
      "loss": 0.0201,
      "step": 660
    },
    {
      "epoch": 10.725806451612904,
      "grad_norm": 1.583339620420521,
      "learning_rate": 8.119544649149762e-06,
      "loss": 0.0207,
      "step": 665
    },
    {
      "epoch": 10.806451612903226,
      "grad_norm": 1.298849150527335,
      "learning_rate": 8.08274199907003e-06,
      "loss": 0.017,
      "step": 670
    },
    {
      "epoch": 10.887096774193548,
      "grad_norm": 1.1682269254944644,
      "learning_rate": 8.0456679158699e-06,
      "loss": 0.0175,
      "step": 675
    },
    {
      "epoch": 10.967741935483872,
      "grad_norm": 1.3418845328882663,
      "learning_rate": 8.008325663894586e-06,
      "loss": 0.0199,
      "step": 680
    },
    {
      "epoch": 11.048387096774194,
      "grad_norm": 1.0350578214030737,
      "learning_rate": 7.970718531101365e-06,
      "loss": 0.0173,
      "step": 685
    },
    {
      "epoch": 11.129032258064516,
      "grad_norm": 0.7751062982939178,
      "learning_rate": 7.932849828770062e-06,
      "loss": 0.0154,
      "step": 690
    },
    {
      "epoch": 11.209677419354838,
      "grad_norm": 1.1249340896245428,
      "learning_rate": 7.89472289121151e-06,
      "loss": 0.0153,
      "step": 695
    },
    {
      "epoch": 11.290322580645162,
      "grad_norm": 1.069133814090821,
      "learning_rate": 7.856341075473963e-06,
      "loss": 0.0128,
      "step": 700
    },
    {
      "epoch": 11.370967741935484,
      "grad_norm": 1.3386909438492063,
      "learning_rate": 7.817707761047498e-06,
      "loss": 0.0147,
      "step": 705
    },
    {
      "epoch": 11.451612903225806,
      "grad_norm": 1.2544557604239914,
      "learning_rate": 7.77882634956647e-06,
      "loss": 0.0137,
      "step": 710
    },
    {
      "epoch": 11.532258064516128,
      "grad_norm": 1.50608779604205,
      "learning_rate": 7.739700264509993e-06,
      "loss": 0.0158,
      "step": 715
    },
    {
      "epoch": 11.612903225806452,
      "grad_norm": 0.8383520545182386,
      "learning_rate": 7.700332950900504e-06,
      "loss": 0.0115,
      "step": 720
    },
    {
      "epoch": 11.693548387096774,
      "grad_norm": 1.1528102169152703,
      "learning_rate": 7.660727875000432e-06,
      "loss": 0.0151,
      "step": 725
    },
    {
      "epoch": 11.774193548387096,
      "grad_norm": 1.1731217245462982,
      "learning_rate": 7.6208885240069995e-06,
      "loss": 0.0137,
      "step": 730
    },
    {
      "epoch": 11.85483870967742,
      "grad_norm": 1.0109598241071842,
      "learning_rate": 7.5808184057451765e-06,
      "loss": 0.0133,
      "step": 735
    },
    {
      "epoch": 11.935483870967742,
      "grad_norm": 1.1815937319956697,
      "learning_rate": 7.540521048358814e-06,
      "loss": 0.0168,
      "step": 740
    },
    {
      "epoch": 12.016129032258064,
      "grad_norm": 0.6727751980657929,
      "learning_rate": 7.500000000000001e-06,
      "loss": 0.0145,
      "step": 745
    },
    {
      "epoch": 12.096774193548388,
      "grad_norm": 0.8287342886061079,
      "learning_rate": 7.459258828516645e-06,
      "loss": 0.0108,
      "step": 750
    },
    {
      "epoch": 12.17741935483871,
      "grad_norm": 0.8907982669901318,
      "learning_rate": 7.418301121138335e-06,
      "loss": 0.0082,
      "step": 755
    },
    {
      "epoch": 12.258064516129032,
      "grad_norm": 1.08087496747789,
      "learning_rate": 7.3771304841604764e-06,
      "loss": 0.0121,
      "step": 760
    },
    {
      "epoch": 12.338709677419354,
      "grad_norm": 0.9242022062218133,
      "learning_rate": 7.335750542626772e-06,
      "loss": 0.0118,
      "step": 765
    },
    {
      "epoch": 12.419354838709678,
      "grad_norm": 0.6674173817031188,
      "learning_rate": 7.294164940010031e-06,
      "loss": 0.0088,
      "step": 770
    },
    {
      "epoch": 12.5,
      "grad_norm": 0.7427776074770551,
      "learning_rate": 7.2523773378913655e-06,
      "loss": 0.01,
      "step": 775
    },
    {
      "epoch": 12.580645161290322,
      "grad_norm": 0.8105255073657276,
      "learning_rate": 7.210391415637797e-06,
      "loss": 0.0102,
      "step": 780
    },
    {
      "epoch": 12.661290322580646,
      "grad_norm": 0.9355789095839642,
      "learning_rate": 7.168210870078277e-06,
      "loss": 0.0112,
      "step": 785
    },
    {
      "epoch": 12.741935483870968,
      "grad_norm": 0.8096036161441085,
      "learning_rate": 7.125839415178204e-06,
      "loss": 0.0109,
      "step": 790
    },
    {
      "epoch": 12.82258064516129,
      "grad_norm": 0.6993400061698866,
      "learning_rate": 7.083280781712394e-06,
      "loss": 0.0125,
      "step": 795
    },
    {
      "epoch": 12.903225806451612,
      "grad_norm": 0.9083278507643704,
      "learning_rate": 7.0405387169365965e-06,
      "loss": 0.0128,
      "step": 800
    },
    {
      "epoch": 12.983870967741936,
      "grad_norm": 0.8581097245501025,
      "learning_rate": 6.9976169842575526e-06,
      "loss": 0.0095,
      "step": 805
    },
    {
      "epoch": 13.064516129032258,
      "grad_norm": 1.1318882973999245,
      "learning_rate": 6.9545193629016215e-06,
      "loss": 0.0053,
      "step": 810
    },
    {
      "epoch": 13.14516129032258,
      "grad_norm": 0.6323423097700148,
      "learning_rate": 6.911249647582036e-06,
      "loss": 0.0071,
      "step": 815
    },
    {
      "epoch": 13.225806451612904,
      "grad_norm": 0.9759430461350306,
      "learning_rate": 6.867811648164769e-06,
      "loss": 0.0084,
      "step": 820
    },
    {
      "epoch": 13.306451612903226,
      "grad_norm": 0.5436428472489789,
      "learning_rate": 6.824209189333082e-06,
      "loss": 0.0059,
      "step": 825
    },
    {
      "epoch": 13.387096774193548,
      "grad_norm": 0.8064226300726538,
      "learning_rate": 6.780446110250766e-06,
      "loss": 0.0083,
      "step": 830
    },
    {
      "epoch": 13.46774193548387,
      "grad_norm": 0.6692079180896646,
      "learning_rate": 6.736526264224101e-06,
      "loss": 0.0069,
      "step": 835
    },
    {
      "epoch": 13.548387096774194,
      "grad_norm": 0.7920667479327235,
      "learning_rate": 6.692453518362587e-06,
      "loss": 0.0087,
      "step": 840
    },
    {
      "epoch": 13.629032258064516,
      "grad_norm": 0.9743408271346984,
      "learning_rate": 6.648231753238431e-06,
      "loss": 0.0108,
      "step": 845
    },
    {
      "epoch": 13.709677419354838,
      "grad_norm": 1.054329395592257,
      "learning_rate": 6.603864862544879e-06,
      "loss": 0.008,
      "step": 850
    },
    {
      "epoch": 13.790322580645162,
      "grad_norm": 0.6890711609162895,
      "learning_rate": 6.5593567527533715e-06,
      "loss": 0.007,
      "step": 855
    },
    {
      "epoch": 13.870967741935484,
      "grad_norm": 0.8929770220878291,
      "learning_rate": 6.514711342769588e-06,
      "loss": 0.0094,
      "step": 860
    },
    {
      "epoch": 13.951612903225806,
      "grad_norm": 0.8470688740181095,
      "learning_rate": 6.469932563588386e-06,
      "loss": 0.0074,
      "step": 865
    },
    {
      "epoch": 14.03225806451613,
      "grad_norm": 0.929920833432491,
      "learning_rate": 6.425024357947677e-06,
      "loss": 0.0081,
      "step": 870
    },
    {
      "epoch": 14.112903225806452,
      "grad_norm": 0.5368583000455479,
      "learning_rate": 6.3799906799812805e-06,
      "loss": 0.0039,
      "step": 875
    },
    {
      "epoch": 14.193548387096774,
      "grad_norm": 0.5542449665774428,
      "learning_rate": 6.334835494870759e-06,
      "loss": 0.0058,
      "step": 880
    },
    {
      "epoch": 14.274193548387096,
      "grad_norm": 1.001191478848351,
      "learning_rate": 6.289562778496285e-06,
      "loss": 0.0058,
      "step": 885
    },
    {
      "epoch": 14.35483870967742,
      "grad_norm": 0.4192339189229186,
      "learning_rate": 6.244176517086573e-06,
      "loss": 0.0033,
      "step": 890
    },
    {
      "epoch": 14.435483870967742,
      "grad_norm": 0.5368429196948196,
      "learning_rate": 6.1986807068678926e-06,
      "loss": 0.0052,
      "step": 895
    },
    {
      "epoch": 14.516129032258064,
      "grad_norm": 0.9333482099983527,
      "learning_rate": 6.153079353712201e-06,
      "loss": 0.0057,
      "step": 900
    },
    {
      "epoch": 14.596774193548388,
      "grad_norm": 1.0750824183015748,
      "learning_rate": 6.107376472784438e-06,
      "loss": 0.0064,
      "step": 905
    },
    {
      "epoch": 14.67741935483871,
      "grad_norm": 0.46525235985013963,
      "learning_rate": 6.061576088188981e-06,
      "loss": 0.0039,
      "step": 910
    },
    {
      "epoch": 14.758064516129032,
      "grad_norm": 0.7126091866734285,
      "learning_rate": 6.015682232615336e-06,
      "loss": 0.0064,
      "step": 915
    },
    {
      "epoch": 14.838709677419354,
      "grad_norm": 0.5389766142133813,
      "learning_rate": 5.969698946983055e-06,
      "loss": 0.0046,
      "step": 920
    },
    {
      "epoch": 14.919354838709678,
      "grad_norm": 0.7912398368360793,
      "learning_rate": 5.923630280085948e-06,
      "loss": 0.0066,
      "step": 925
    },
    {
      "epoch": 15.0,
      "grad_norm": 0.3365541360781848,
      "learning_rate": 5.877480288235569e-06,
      "loss": 0.0041,
      "step": 930
    },
    {
      "epoch": 15.080645161290322,
      "grad_norm": 0.5756821998704884,
      "learning_rate": 5.831253034904083e-06,
      "loss": 0.0032,
      "step": 935
    },
    {
      "epoch": 15.161290322580646,
      "grad_norm": 0.5472260450914866,
      "learning_rate": 5.7849525903664636e-06,
      "loss": 0.003,
      "step": 940
    },
    {
      "epoch": 15.241935483870968,
      "grad_norm": 0.7413872213041397,
      "learning_rate": 5.738583031342123e-06,
      "loss": 0.0021,
      "step": 945
    },
    {
      "epoch": 15.32258064516129,
      "grad_norm": 0.31105156794159244,
      "learning_rate": 5.692148440635946e-06,
      "loss": 0.0046,
      "step": 950
    },
    {
      "epoch": 15.403225806451612,
      "grad_norm": 0.49486817449196063,
      "learning_rate": 5.645652906778808e-06,
      "loss": 0.0045,
      "step": 955
    },
    {
      "epoch": 15.483870967741936,
      "grad_norm": 0.495881963809128,
      "learning_rate": 5.599100523667586e-06,
      "loss": 0.0036,
      "step": 960
    },
    {
      "epoch": 15.564516129032258,
      "grad_norm": 0.1904481467324096,
      "learning_rate": 5.552495390204691e-06,
      "loss": 0.003,
      "step": 965
    },
    {
      "epoch": 15.64516129032258,
      "grad_norm": 1.4986437522735607,
      "learning_rate": 5.505841609937162e-06,
      "loss": 0.0035,
      "step": 970
    },
    {
      "epoch": 15.725806451612904,
      "grad_norm": 0.47001689779830086,
      "learning_rate": 5.4591432906953515e-06,
      "loss": 0.0048,
      "step": 975
    },
    {
      "epoch": 15.806451612903226,
      "grad_norm": 0.5583329472352823,
      "learning_rate": 5.412404544231235e-06,
      "loss": 0.0037,
      "step": 980
    },
    {
      "epoch": 15.887096774193548,
      "grad_norm": 0.5783854060659827,
      "learning_rate": 5.365629485856381e-06,
      "loss": 0.0042,
      "step": 985
    },
    {
      "epoch": 15.967741935483872,
      "grad_norm": 0.2835549036025041,
      "learning_rate": 5.318822234079584e-06,
      "loss": 0.0038,
      "step": 990
    },
    {
      "epoch": 16.048387096774192,
      "grad_norm": 0.26543863895778785,
      "learning_rate": 5.271986910244254e-06,
      "loss": 0.0033,
      "step": 995
    },
    {
      "epoch": 16.129032258064516,
      "grad_norm": 0.3461850747797189,
      "learning_rate": 5.225127638165514e-06,
      "loss": 0.0012,
      "step": 1000
    },
    {
      "epoch": 16.20967741935484,
      "grad_norm": 0.20302112651652984,
      "learning_rate": 5.178248543767122e-06,
      "loss": 0.0023,
      "step": 1005
    },
    {
      "epoch": 16.29032258064516,
      "grad_norm": 0.29439593626200566,
      "learning_rate": 5.1313537547181716e-06,
      "loss": 0.0011,
      "step": 1010
    },
    {
      "epoch": 16.370967741935484,
      "grad_norm": 0.8139757999523084,
      "learning_rate": 5.084447400069656e-06,
      "loss": 0.0034,
      "step": 1015
    },
    {
      "epoch": 16.451612903225808,
      "grad_norm": 0.07328838920365086,
      "learning_rate": 5.037533609890917e-06,
      "loss": 0.002,
      "step": 1020
    },
    {
      "epoch": 16.532258064516128,
      "grad_norm": 0.5394374063568352,
      "learning_rate": 4.990616514905982e-06,
      "loss": 0.003,
      "step": 1025
    },
    {
      "epoch": 16.612903225806452,
      "grad_norm": 0.5273102775020326,
      "learning_rate": 4.943700246129871e-06,
      "loss": 0.0028,
      "step": 1030
    },
    {
      "epoch": 16.693548387096776,
      "grad_norm": 0.22923181299955606,
      "learning_rate": 4.896788934504853e-06,
      "loss": 0.0023,
      "step": 1035
    },
    {
      "epoch": 16.774193548387096,
      "grad_norm": 0.4388799078492787,
      "learning_rate": 4.849886710536725e-06,
      "loss": 0.0016,
      "step": 1040
    },
    {
      "epoch": 16.85483870967742,
      "grad_norm": 0.23756274003048963,
      "learning_rate": 4.802997703931124e-06,
      "loss": 0.002,
      "step": 1045
    },
    {
      "epoch": 16.93548387096774,
      "grad_norm": 0.5091835157167591,
      "learning_rate": 4.7561260432299015e-06,
      "loss": 0.002,
      "step": 1050
    },
    {
      "epoch": 17.016129032258064,
      "grad_norm": 0.04880466280278329,
      "learning_rate": 4.7092758554476215e-06,
      "loss": 0.0022,
      "step": 1055
    },
    {
      "epoch": 17.096774193548388,
      "grad_norm": 0.0690662865214496,
      "learning_rate": 4.662451265708174e-06,
      "loss": 0.0005,
      "step": 1060
    },
    {
      "epoch": 17.177419354838708,
      "grad_norm": 0.3876540766195839,
      "learning_rate": 4.6156563968815575e-06,
      "loss": 0.0008,
      "step": 1065
    },
    {
      "epoch": 17.258064516129032,
      "grad_norm": 0.25500845510351994,
      "learning_rate": 4.568895369220868e-06,
      "loss": 0.003,
      "step": 1070
    },
    {
      "epoch": 17.338709677419356,
      "grad_norm": 0.6330419705015018,
      "learning_rate": 4.52217229999951e-06,
      "loss": 0.0022,
      "step": 1075
    },
    {
      "epoch": 17.419354838709676,
      "grad_norm": 0.11483151989876988,
      "learning_rate": 4.47549130314868e-06,
      "loss": 0.001,
      "step": 1080
    },
    {
      "epoch": 17.5,
      "grad_norm": 0.17159385370431057,
      "learning_rate": 4.428856488895128e-06,
      "loss": 0.0012,
      "step": 1085
    },
    {
      "epoch": 17.580645161290324,
      "grad_norm": 0.1863212713925692,
      "learning_rate": 4.382271963399268e-06,
      "loss": 0.0015,
      "step": 1090
    },
    {
      "epoch": 17.661290322580644,
      "grad_norm": 0.06850576072221558,
      "learning_rate": 4.33574182839362e-06,
      "loss": 0.0004,
      "step": 1095
    },
    {
      "epoch": 17.741935483870968,
      "grad_norm": 0.10525402416881399,
      "learning_rate": 4.28927018082167e-06,
      "loss": 0.0005,
      "step": 1100
    },
    {
      "epoch": 17.822580645161292,
      "grad_norm": 0.12279014037290367,
      "learning_rate": 4.2428611124771184e-06,
      "loss": 0.0007,
      "step": 1105
    },
    {
      "epoch": 17.903225806451612,
      "grad_norm": 0.02608965513556553,
      "learning_rate": 4.19651870964362e-06,
      "loss": 0.001,
      "step": 1110
    },
    {
      "epoch": 17.983870967741936,
      "grad_norm": 0.25429574204112215,
      "learning_rate": 4.150247052734979e-06,
      "loss": 0.0004,
      "step": 1115
    },
    {
      "epoch": 18.06451612903226,
      "grad_norm": 0.243250705158098,
      "learning_rate": 4.104050215935875e-06,
      "loss": 0.0009,
      "step": 1120
    },
    {
      "epoch": 18.14516129032258,
      "grad_norm": 0.061519211899759375,
      "learning_rate": 4.0579322668431295e-06,
      "loss": 0.0001,
      "step": 1125
    },
    {
      "epoch": 18.225806451612904,
      "grad_norm": 0.16536193010806607,
      "learning_rate": 4.011897266107567e-06,
      "loss": 0.0007,
      "step": 1130
    },
    {
      "epoch": 18.306451612903224,
      "grad_norm": 0.010913083064682323,
      "learning_rate": 3.965949267076465e-06,
      "loss": 0.0002,
      "step": 1135
    },
    {
      "epoch": 18.387096774193548,
      "grad_norm": 0.20004856388884348,
      "learning_rate": 3.9200923154366685e-06,
      "loss": 0.0001,
      "step": 1140
    },
    {
      "epoch": 18.467741935483872,
      "grad_norm": 0.3463516946351112,
      "learning_rate": 3.874330448858369e-06,
      "loss": 0.0005,
      "step": 1145
    },
    {
      "epoch": 18.548387096774192,
      "grad_norm": 0.2325315949839801,
      "learning_rate": 3.8286676966395895e-06,
      "loss": 0.0011,
      "step": 1150
    },
    {
      "epoch": 18.629032258064516,
      "grad_norm": 0.008775015145377176,
      "learning_rate": 3.7831080793514065e-06,
      "loss": 0.0001,
      "step": 1155
    },
    {
      "epoch": 18.70967741935484,
      "grad_norm": 0.02260946928685491,
      "learning_rate": 3.7376556084839465e-06,
      "loss": 0.0001,
      "step": 1160
    },
    {
      "epoch": 18.79032258064516,
      "grad_norm": 0.018527225185909574,
      "learning_rate": 3.692314286093167e-06,
      "loss": 0.0006,
      "step": 1165
    },
    {
      "epoch": 18.870967741935484,
      "grad_norm": 0.07806138648571144,
      "learning_rate": 3.647088104448494e-06,
      "loss": 0.0008,
      "step": 1170
    },
    {
      "epoch": 18.951612903225808,
      "grad_norm": 0.06736257660294817,
      "learning_rate": 3.601981045681292e-06,
      "loss": 0.0003,
      "step": 1175
    },
    {
      "epoch": 19.032258064516128,
      "grad_norm": 1.0106482864576842,
      "learning_rate": 3.556997081434248e-06,
      "loss": 0.0003,
      "step": 1180
    },
    {
      "epoch": 19.112903225806452,
      "grad_norm": 0.0066423021925601415,
      "learning_rate": 3.5121401725116653e-06,
      "loss": 0.0001,
      "step": 1185
    },
    {
      "epoch": 19.193548387096776,
      "grad_norm": 0.009775692796235318,
      "learning_rate": 3.4674142685307264e-06,
      "loss": 0.0002,
      "step": 1190
    },
    {
      "epoch": 19.274193548387096,
      "grad_norm": 0.005239098445984768,
      "learning_rate": 3.4228233075737225e-06,
      "loss": 0.0002,
      "step": 1195
    },
    {
      "epoch": 19.35483870967742,
      "grad_norm": 0.006226349311105782,
      "learning_rate": 3.3783712158413163e-06,
      "loss": 0.0005,
      "step": 1200
    },
    {
      "epoch": 19.43548387096774,
      "grad_norm": 0.03001061383604504,
      "learning_rate": 3.3340619073068347e-06,
      "loss": 0.0001,
      "step": 1205
    },
    {
      "epoch": 19.516129032258064,
      "grad_norm": 0.16124736678411333,
      "learning_rate": 3.289899283371657e-06,
      "loss": 0.0003,
      "step": 1210
    },
    {
      "epoch": 19.596774193548388,
      "grad_norm": 0.01406085852693565,
      "learning_rate": 3.2458872325216893e-06,
      "loss": 0.0001,
      "step": 1215
    },
    {
      "epoch": 19.677419354838708,
      "grad_norm": 0.1457872428183982,
      "learning_rate": 3.202029629984991e-06,
      "loss": 0.0004,
      "step": 1220
    },
    {
      "epoch": 19.758064516129032,
      "grad_norm": 0.6063889583401607,
      "learning_rate": 3.158330337390565e-06,
      "loss": 0.0012,
      "step": 1225
    },
    {
      "epoch": 19.838709677419356,
      "grad_norm": 0.014910439511376826,
      "learning_rate": 3.1147932024283424e-06,
      "loss": 0.0001,
      "step": 1230
    },
    {
      "epoch": 19.919354838709676,
      "grad_norm": 0.05207495007601388,
      "learning_rate": 3.071422058510394e-06,
      "loss": 0.0001,
      "step": 1235
    },
    {
      "epoch": 20.0,
      "grad_norm": 0.007164451638116118,
      "learning_rate": 3.0282207244334084e-06,
      "loss": 0.0002,
      "step": 1240
    },
    {
      "epoch": 20.080645161290324,
      "grad_norm": 0.005575173796042589,
      "learning_rate": 2.9851930040424383e-06,
      "loss": 0.0001,
      "step": 1245
    },
    {
      "epoch": 20.161290322580644,
      "grad_norm": 0.005587015782173161,
      "learning_rate": 2.9423426858959892e-06,
      "loss": 0.0001,
      "step": 1250
    },
    {
      "epoch": 20.241935483870968,
      "grad_norm": 0.004532759604085177,
      "learning_rate": 2.8996735429324256e-06,
      "loss": 0.0002,
      "step": 1255
    },
    {
      "epoch": 20.322580645161292,
      "grad_norm": 0.004246114621562207,
      "learning_rate": 2.8571893321377773e-06,
      "loss": 0.0004,
      "step": 1260
    },
    {
      "epoch": 20.403225806451612,
      "grad_norm": 0.00657132905130073,
      "learning_rate": 2.8148937942149347e-06,
      "loss": 0.0003,
      "step": 1265
    },
    {
      "epoch": 20.483870967741936,
      "grad_norm": 0.2530412260685536,
      "learning_rate": 2.7727906532542783e-06,
      "loss": 0.0004,
      "step": 1270
    },
    {
      "epoch": 20.56451612903226,
      "grad_norm": 0.017709512106030396,
      "learning_rate": 2.7308836164057913e-06,
      "loss": 0.0001,
      "step": 1275
    },
    {
      "epoch": 20.64516129032258,
      "grad_norm": 0.004774683400567215,
      "learning_rate": 2.6891763735526223e-06,
      "loss": 0.0002,
      "step": 1280
    },
    {
      "epoch": 20.725806451612904,
      "grad_norm": 0.004893229451274434,
      "learning_rate": 2.6476725969862227e-06,
      "loss": 0.0001,
      "step": 1285
    },
    {
      "epoch": 20.806451612903224,
      "grad_norm": 0.003953154707931952,
      "learning_rate": 2.6063759410829813e-06,
      "loss": 0.0001,
      "step": 1290
    },
    {
      "epoch": 20.887096774193548,
      "grad_norm": 0.0095082103536684,
      "learning_rate": 2.565290041982471e-06,
      "loss": 0.0,
      "step": 1295
    },
    {
      "epoch": 20.967741935483872,
      "grad_norm": 0.005067453856023592,
      "learning_rate": 2.524418517267283e-06,
      "loss": 0.0,
      "step": 1300
    },
    {
      "epoch": 21.048387096774192,
      "grad_norm": 0.008752186832681854,
      "learning_rate": 2.4837649656445117e-06,
      "loss": 0.0,
      "step": 1305
    },
    {
      "epoch": 21.129032258064516,
      "grad_norm": 0.004280084453557012,
      "learning_rate": 2.4433329666288774e-06,
      "loss": 0.0,
      "step": 1310
    },
    {
      "epoch": 21.20967741935484,
      "grad_norm": 0.0034997530359262684,
      "learning_rate": 2.4031260802275623e-06,
      "loss": 0.0,
      "step": 1315
    },
    {
      "epoch": 21.29032258064516,
      "grad_norm": 0.0034421062676381043,
      "learning_rate": 2.3631478466267498e-06,
      "loss": 0.0,
      "step": 1320
    },
    {
      "epoch": 21.370967741935484,
      "grad_norm": 0.004620512533431532,
      "learning_rate": 2.323401785879921e-06,
      "loss": 0.0001,
      "step": 1325
    },
    {
      "epoch": 21.451612903225808,
      "grad_norm": 0.0034331243522691852,
      "learning_rate": 2.283891397597908e-06,
      "loss": 0.0,
      "step": 1330
    },
    {
      "epoch": 21.532258064516128,
      "grad_norm": 0.003428560227543178,
      "learning_rate": 2.2446201606407537e-06,
      "loss": 0.0,
      "step": 1335
    },
    {
      "epoch": 21.612903225806452,
      "grad_norm": 0.003308032898823153,
      "learning_rate": 2.205591532811416e-06,
      "loss": 0.0,
      "step": 1340
    },
    {
      "epoch": 21.693548387096776,
      "grad_norm": 0.003612408103220046,
      "learning_rate": 2.166808950551296e-06,
      "loss": 0.0,
      "step": 1345
    },
    {
      "epoch": 21.774193548387096,
      "grad_norm": 0.003933662692416643,
      "learning_rate": 2.128275828637664e-06,
      "loss": 0.0,
      "step": 1350
    },
    {
      "epoch": 21.85483870967742,
      "grad_norm": 0.12319110414921805,
      "learning_rate": 2.089995559883004e-06,
      "loss": 0.0004,
      "step": 1355
    },
    {
      "epoch": 21.93548387096774,
      "grad_norm": 0.002895844359378954,
      "learning_rate": 2.0519715148362585e-06,
      "loss": 0.0003,
      "step": 1360
    },
    {
      "epoch": 22.016129032258064,
      "grad_norm": 0.0037964033544630386,
      "learning_rate": 2.0142070414860704e-06,
      "loss": 0.0,
      "step": 1365
    },
    {
      "epoch": 22.096774193548388,
      "grad_norm": 0.00266301897265662,
      "learning_rate": 1.976705464965985e-06,
      "loss": 0.0,
      "step": 1370
    },
    {
      "epoch": 22.177419354838708,
      "grad_norm": 0.0031847657946017243,
      "learning_rate": 1.9394700872616856e-06,
      "loss": 0.0,
      "step": 1375
    },
    {
      "epoch": 22.258064516129032,
      "grad_norm": 0.0036410292226950916,
      "learning_rate": 1.902504186920245e-06,
      "loss": 0.0,
      "step": 1380
    },
    {
      "epoch": 22.338709677419356,
      "grad_norm": 0.0039062296980956895,
      "learning_rate": 1.8658110187614538e-06,
      "loss": 0.0,
      "step": 1385
    },
    {
      "epoch": 22.419354838709676,
      "grad_norm": 0.0028439324531372924,
      "learning_rate": 1.8293938135912475e-06,
      "loss": 0.0,
      "step": 1390
    },
    {
      "epoch": 22.5,
      "grad_norm": 0.004399079537236235,
      "learning_rate": 1.793255777917217e-06,
      "loss": 0.0,
      "step": 1395
    },
    {
      "epoch": 22.580645161290324,
      "grad_norm": 0.002701655541409058,
      "learning_rate": 1.7574000936662928e-06,
      "loss": 0.0,
      "step": 1400
    },
    {
      "epoch": 22.661290322580644,
      "grad_norm": 0.002570485405657819,
      "learning_rate": 1.7218299179045789e-06,
      "loss": 0.0,
      "step": 1405
    },
    {
      "epoch": 22.741935483870968,
      "grad_norm": 0.009168180129546039,
      "learning_rate": 1.6865483825593643e-06,
      "loss": 0.0,
      "step": 1410
    },
    {
      "epoch": 22.822580645161292,
      "grad_norm": 0.002522063273166655,
      "learning_rate": 1.6515585941433694e-06,
      "loss": 0.0,
      "step": 1415
    },
    {
      "epoch": 22.903225806451612,
      "grad_norm": 0.0028124468856722045,
      "learning_rate": 1.6168636334812126e-06,
      "loss": 0.0,
      "step": 1420
    },
    {
      "epoch": 22.983870967741936,
      "grad_norm": 0.0024194505739071723,
      "learning_rate": 1.5824665554381579e-06,
      "loss": 0.0001,
      "step": 1425
    },
    {
      "epoch": 23.06451612903226,
      "grad_norm": 0.0021726227770893767,
      "learning_rate": 1.5483703886511191e-06,
      "loss": 0.0,
      "step": 1430
    },
    {
      "epoch": 23.14516129032258,
      "grad_norm": 0.003023753573205281,
      "learning_rate": 1.5145781352620054e-06,
      "loss": 0.0,
      "step": 1435
    },
    {
      "epoch": 23.225806451612904,
      "grad_norm": 0.0020433938309632202,
      "learning_rate": 1.481092770653374e-06,
      "loss": 0.0,
      "step": 1440
    },
    {
      "epoch": 23.306451612903224,
      "grad_norm": 0.0024198754613308283,
      "learning_rate": 1.4479172431864647e-06,
      "loss": 0.0,
      "step": 1445
    },
    {
      "epoch": 23.387096774193548,
      "grad_norm": 0.0024310415738286306,
      "learning_rate": 1.4150544739415755e-06,
      "loss": 0.0,
      "step": 1450
    },
    {
      "epoch": 23.467741935483872,
      "grad_norm": 0.003158130199121038,
      "learning_rate": 1.382507356460891e-06,
      "loss": 0.0,
      "step": 1455
    },
    {
      "epoch": 23.548387096774192,
      "grad_norm": 0.0026174264468685694,
      "learning_rate": 1.3502787564936875e-06,
      "loss": 0.0001,
      "step": 1460
    },
    {
      "epoch": 23.629032258064516,
      "grad_norm": 0.002536775368245464,
      "learning_rate": 1.3183715117440143e-06,
      "loss": 0.0,
      "step": 1465
    },
    {
      "epoch": 23.70967741935484,
      "grad_norm": 0.002180203461038152,
      "learning_rate": 1.2867884316208345e-06,
      "loss": 0.0,
      "step": 1470
    },
    {
      "epoch": 23.79032258064516,
      "grad_norm": 0.002128799051994566,
      "learning_rate": 1.255532296990662e-06,
      "loss": 0.0,
      "step": 1475
    },
    {
      "epoch": 23.870967741935484,
      "grad_norm": 0.0018781087737587013,
      "learning_rate": 1.2246058599327021e-06,
      "loss": 0.0,
      "step": 1480
    },
    {
      "epoch": 23.951612903225808,
      "grad_norm": 0.0025644628207549037,
      "learning_rate": 1.194011843496537e-06,
      "loss": 0.0,
      "step": 1485
    },
    {
      "epoch": 24.032258064516128,
      "grad_norm": 0.0020685617062611984,
      "learning_rate": 1.163752941462362e-06,
      "loss": 0.0,
      "step": 1490
    },
    {
      "epoch": 24.112903225806452,
      "grad_norm": 0.0021144359766594833,
      "learning_rate": 1.1338318181038037e-06,
      "loss": 0.0,
      "step": 1495
    },
    {
      "epoch": 24.193548387096776,
      "grad_norm": 0.0022016988810620295,
      "learning_rate": 1.1042511079533275e-06,
      "loss": 0.0,
      "step": 1500
    },
    {
      "epoch": 24.274193548387096,
      "grad_norm": 0.0027669232261221806,
      "learning_rate": 1.0750134155702674e-06,
      "loss": 0.0,
      "step": 1505
    },
    {
      "epoch": 24.35483870967742,
      "grad_norm": 0.00198403670198103,
      "learning_rate": 1.046121315311508e-06,
      "loss": 0.0,
      "step": 1510
    },
    {
      "epoch": 24.43548387096774,
      "grad_norm": 0.0018395411127014771,
      "learning_rate": 1.017577351104801e-06,
      "loss": 0.0,
      "step": 1515
    },
    {
      "epoch": 24.516129032258064,
      "grad_norm": 0.0021241222561047836,
      "learning_rate": 9.893840362247809e-07,
      "loss": 0.0,
      "step": 1520
    },
    {
      "epoch": 24.596774193548388,
      "grad_norm": 0.002272620643036318,
      "learning_rate": 9.615438530716753e-07,
      "loss": 0.0,
      "step": 1525
    },
    {
      "epoch": 24.677419354838708,
      "grad_norm": 0.0023806482249812736,
      "learning_rate": 9.340592529527237e-07,
      "loss": 0.0,
      "step": 1530
    },
    {
      "epoch": 24.758064516129032,
      "grad_norm": 0.0020728577474889973,
      "learning_rate": 9.069326558663488e-07,
      "loss": 0.0,
      "step": 1535
    },
    {
      "epoch": 24.838709677419356,
      "grad_norm": 0.001887217315306185,
      "learning_rate": 8.801664502890722e-07,
      "loss": 0.0,
      "step": 1540
    },
    {
      "epoch": 24.919354838709676,
      "grad_norm": 0.00239885550496235,
      "learning_rate": 8.537629929652186e-07,
      "loss": 0.0,
      "step": 1545
    },
    {
      "epoch": 25.0,
      "grad_norm": 0.0016858785746472528,
      "learning_rate": 8.277246086993962e-07,
      "loss": 0.0,
      "step": 1550
    },
    {
      "epoch": 25.080645161290324,
      "grad_norm": 0.002028853985369188,
      "learning_rate": 8.02053590151805e-07,
      "loss": 0.0,
      "step": 1555
    },
    {
      "epoch": 25.161290322580644,
      "grad_norm": 0.002779776831477939,
      "learning_rate": 7.767521976363735e-07,
      "loss": 0.0,
      "step": 1560
    },
    {
      "epoch": 25.241935483870968,
      "grad_norm": 0.002343674176808224,
      "learning_rate": 7.518226589217286e-07,
      "loss": 0.0,
      "step": 1565
    },
    {
      "epoch": 25.322580645161292,
      "grad_norm": 0.0016113809080491644,
      "learning_rate": 7.27267169035053e-07,
      "loss": 0.0,
      "step": 1570
    },
    {
      "epoch": 25.403225806451612,
      "grad_norm": 0.0019011835378859039,
      "learning_rate": 7.030878900688115e-07,
      "loss": 0.0,
      "step": 1575
    },
    {
      "epoch": 25.483870967741936,
      "grad_norm": 0.0019987840678987335,
      "learning_rate": 6.792869509903777e-07,
      "loss": 0.0,
      "step": 1580
    },
    {
      "epoch": 25.56451612903226,
      "grad_norm": 0.002397024169372539,
      "learning_rate": 6.558664474545817e-07,
      "loss": 0.0,
      "step": 1585
    },
    {
      "epoch": 25.64516129032258,
      "grad_norm": 0.0019208450596181978,
      "learning_rate": 6.328284416191893e-07,
      "loss": 0.0,
      "step": 1590
    },
    {
      "epoch": 25.725806451612904,
      "grad_norm": 0.0016695639181624647,
      "learning_rate": 6.101749619633346e-07,
      "loss": 0.0,
      "step": 1595
    },
    {
      "epoch": 25.806451612903224,
      "grad_norm": 0.0021158163462360858,
      "learning_rate": 5.879080031089047e-07,
      "loss": 0.0,
      "step": 1600
    },
    {
      "epoch": 25.887096774193548,
      "grad_norm": 0.002030638213594273,
      "learning_rate": 5.660295256449233e-07,
      "loss": 0.0,
      "step": 1605
    },
    {
      "epoch": 25.967741935483872,
      "grad_norm": 0.0019761331922791415,
      "learning_rate": 5.445414559549167e-07,
      "loss": 0.0,
      "step": 1610
    },
    {
      "epoch": 26.048387096774192,
      "grad_norm": 0.0018309924188128091,
      "learning_rate": 5.234456860473042e-07,
      "loss": 0.0,
      "step": 1615
    },
    {
      "epoch": 26.129032258064516,
      "grad_norm": 0.0015836629417467344,
      "learning_rate": 5.027440733887973e-07,
      "loss": 0.0,
      "step": 1620
    },
    {
      "epoch": 26.20967741935484,
      "grad_norm": 0.002069672685194233,
      "learning_rate": 4.824384407408622e-07,
      "loss": 0.0,
      "step": 1625
    },
    {
      "epoch": 26.29032258064516,
      "grad_norm": 0.0016053789037440292,
      "learning_rate": 4.625305759992205e-07,
      "loss": 0.0,
      "step": 1630
    },
    {
      "epoch": 26.370967741935484,
      "grad_norm": 0.003038986841781915,
      "learning_rate": 4.4302223203642803e-07,
      "loss": 0.0,
      "step": 1635
    },
    {
      "epoch": 26.451612903225808,
      "grad_norm": 0.016841912471917662,
      "learning_rate": 4.2391512654753443e-07,
      "loss": 0.0,
      "step": 1640
    },
    {
      "epoch": 26.532258064516128,
      "grad_norm": 0.0021731322250136444,
      "learning_rate": 4.05210941898847e-07,
      "loss": 0.0,
      "step": 1645
    },
    {
      "epoch": 26.612903225806452,
      "grad_norm": 0.002426890030757636,
      "learning_rate": 3.8691132497979064e-07,
      "loss": 0.0,
      "step": 1650
    },
    {
      "epoch": 26.693548387096776,
      "grad_norm": 0.0018074854399242187,
      "learning_rate": 3.6901788705790753e-07,
      "loss": 0.0,
      "step": 1655
    },
    {
      "epoch": 26.774193548387096,
      "grad_norm": 0.001885989581450769,
      "learning_rate": 3.5153220363698225e-07,
      "loss": 0.0,
      "step": 1660
    },
    {
      "epoch": 26.85483870967742,
      "grad_norm": 0.0019683691140617316,
      "learning_rate": 3.344558143183246e-07,
      "loss": 0.0,
      "step": 1665
    },
    {
      "epoch": 26.93548387096774,
      "grad_norm": 0.0020554436684360193,
      "learning_rate": 3.1779022266520245e-07,
      "loss": 0.0,
      "step": 1670
    },
    {
      "epoch": 27.016129032258064,
      "grad_norm": 0.0016450213648640925,
      "learning_rate": 3.015368960704584e-07,
      "loss": 0.0,
      "step": 1675
    },
    {
      "epoch": 27.096774193548388,
      "grad_norm": 0.0017297405266144185,
      "learning_rate": 2.856972656273066e-07,
      "loss": 0.0,
      "step": 1680
    },
    {
      "epoch": 27.177419354838708,
      "grad_norm": 0.0021991582033148538,
      "learning_rate": 2.7027272600332443e-07,
      "loss": 0.0,
      "step": 1685
    },
    {
      "epoch": 27.258064516129032,
      "grad_norm": 0.001867924988471241,
      "learning_rate": 2.5526463531765467e-07,
      "loss": 0.0,
      "step": 1690
    },
    {
      "epoch": 27.338709677419356,
      "grad_norm": 0.0017309263826738985,
      "learning_rate": 2.4067431502142414e-07,
      "loss": 0.0,
      "step": 1695
    },
    {
      "epoch": 27.419354838709676,
      "grad_norm": 0.002424011353540903,
      "learning_rate": 2.2650304978138916e-07,
      "loss": 0.0,
      "step": 1700
    },
    {
      "epoch": 27.5,
      "grad_norm": 0.014373142062278578,
      "learning_rate": 2.1275208736682262e-07,
      "loss": 0.0,
| "step": 1705 | |
| }, | |
| { | |
| "epoch": 27.580645161290324, | |
| "grad_norm": 0.0013645806944746295, | |
| "learning_rate": 1.9942263853964917e-07, | |
| "loss": 0.0, | |
| "step": 1710 | |
| }, | |
| { | |
| "epoch": 27.661290322580644, | |
| "grad_norm": 0.0016073814055334174, | |
| "learning_rate": 1.8651587694783924e-07, | |
| "loss": 0.0, | |
| "step": 1715 | |
| }, | |
| { | |
| "epoch": 27.741935483870968, | |
| "grad_norm": 0.0052368126966420795, | |
| "learning_rate": 1.7403293902206851e-07, | |
| "loss": 0.0, | |
| "step": 1720 | |
| }, | |
| { | |
| "epoch": 27.822580645161292, | |
| "grad_norm": 0.0016695147578140352, | |
| "learning_rate": 1.6197492387565629e-07, | |
| "loss": 0.0, | |
| "step": 1725 | |
| }, | |
| { | |
| "epoch": 27.903225806451612, | |
| "grad_norm": 0.0016759160456700358, | |
| "learning_rate": 1.503428932077916e-07, | |
| "loss": 0.0, | |
| "step": 1730 | |
| }, | |
| { | |
| "epoch": 27.983870967741936, | |
| "grad_norm": 0.002108637326817644, | |
| "learning_rate": 1.3913787121004717e-07, | |
| "loss": 0.0, | |
| "step": 1735 | |
| }, | |
| { | |
| "epoch": 28.06451612903226, | |
| "grad_norm": 0.0016869985552881567, | |
| "learning_rate": 1.2836084447620466e-07, | |
| "loss": 0.0, | |
| "step": 1740 | |
| }, | |
| { | |
| "epoch": 28.14516129032258, | |
| "grad_norm": 0.005277510382116748, | |
| "learning_rate": 1.180127619153837e-07, | |
| "loss": 0.0, | |
| "step": 1745 | |
| }, | |
| { | |
| "epoch": 28.225806451612904, | |
| "grad_norm": 0.0017776259721965467, | |
| "learning_rate": 1.0809453466849029e-07, | |
| "loss": 0.0, | |
| "step": 1750 | |
| }, | |
| { | |
| "epoch": 28.306451612903224, | |
| "grad_norm": 0.0017343115707303457, | |
| "learning_rate": 9.860703602799281e-08, | |
| "loss": 0.0, | |
| "step": 1755 | |
| }, | |
| { | |
| "epoch": 28.387096774193548, | |
| "grad_norm": 0.0018083897903790839, | |
| "learning_rate": 8.955110136102952e-08, | |
| "loss": 0.0, | |
| "step": 1760 | |
| }, | |
| { | |
| "epoch": 28.467741935483872, | |
| "grad_norm": 0.00170503799962613, | |
| "learning_rate": 8.092752803585513e-08, | |
| "loss": 0.0, | |
| "step": 1765 | |
| }, | |
| { | |
| "epoch": 28.548387096774192, | |
| "grad_norm": 0.0017763671542264713, | |
| "learning_rate": 7.273707535162988e-08, | |
| "loss": 0.0, | |
| "step": 1770 | |
| }, | |
| { | |
| "epoch": 28.629032258064516, | |
| "grad_norm": 0.002095601833692134, | |
| "learning_rate": 6.498046447156958e-08, | |
| "loss": 0.0, | |
| "step": 1775 | |
| }, | |
| { | |
| "epoch": 28.70967741935484, | |
| "grad_norm": 0.0021146427698936717, | |
| "learning_rate": 5.7658378359443104e-08, | |
| "loss": 0.0, | |
| "step": 1780 | |
| }, | |
| { | |
| "epoch": 28.79032258064516, | |
| "grad_norm": 0.012189733209084294, | |
| "learning_rate": 5.077146171943936e-08, | |
| "loss": 0.0, | |
| "step": 1785 | |
| }, | |
| { | |
| "epoch": 28.870967741935484, | |
| "grad_norm": 0.001939021815709666, | |
| "learning_rate": 4.432032093940219e-08, | |
| "loss": 0.0, | |
| "step": 1790 | |
| }, | |
| { | |
| "epoch": 28.951612903225808, | |
| "grad_norm": 0.0017207663057172715, | |
| "learning_rate": 3.8305524037438035e-08, | |
| "loss": 0.0, | |
| "step": 1795 | |
| }, | |
| { | |
| "epoch": 29.032258064516128, | |
| "grad_norm": 0.002480072613280019, | |
| "learning_rate": 3.27276006119015e-08, | |
| "loss": 0.0, | |
| "step": 1800 | |
| }, | |
| { | |
| "epoch": 29.112903225806452, | |
| "grad_norm": 0.001824979657478761, | |
| "learning_rate": 2.7587041794766012e-08, | |
| "loss": 0.0, | |
| "step": 1805 | |
| }, | |
| { | |
| "epoch": 29.193548387096776, | |
| "grad_norm": 0.0018857495214116063, | |
| "learning_rate": 2.2884300208378395e-08, | |
| "loss": 0.0, | |
| "step": 1810 | |
| }, | |
| { | |
| "epoch": 29.274193548387096, | |
| "grad_norm": 0.001641386602403516, | |
| "learning_rate": 1.8619789925608534e-08, | |
| "loss": 0.0, | |
| "step": 1815 | |
| }, | |
| { | |
| "epoch": 29.35483870967742, | |
| "grad_norm": 0.0019148208868270135, | |
| "learning_rate": 1.4793886433387417e-08, | |
| "loss": 0.0, | |
| "step": 1820 | |
| }, | |
| { | |
| "epoch": 29.43548387096774, | |
| "grad_norm": 0.001755356705920889, | |
| "learning_rate": 1.1406926599646373e-08, | |
| "loss": 0.0, | |
| "step": 1825 | |
| }, | |
| { | |
| "epoch": 29.516129032258064, | |
| "grad_norm": 0.0016780357056146887, | |
| "learning_rate": 8.459208643659122e-09, | |
| "loss": 0.0, | |
| "step": 1830 | |
| }, | |
| { | |
| "epoch": 29.596774193548388, | |
| "grad_norm": 0.0021748812816213365, | |
| "learning_rate": 5.950992109779452e-09, | |
| "loss": 0.0, | |
| "step": 1835 | |
| }, | |
| { | |
| "epoch": 29.677419354838708, | |
| "grad_norm": 0.0021491160683111835, | |
| "learning_rate": 3.88249784459227e-09, | |
| "loss": 0.0, | |
| "step": 1840 | |
| }, | |
| { | |
| "epoch": 29.758064516129032, | |
| "grad_norm": 0.001534770939524029, | |
| "learning_rate": 2.2539079774658303e-09, | |
| "loss": 0.0, | |
| "step": 1845 | |
| }, | |
| { | |
| "epoch": 29.838709677419356, | |
| "grad_norm": 0.0019699920227883878, | |
| "learning_rate": 1.0653659045156695e-09, | |
| "loss": 0.0, | |
| "step": 1850 | |
| }, | |
| { | |
| "epoch": 29.919354838709676, | |
| "grad_norm": 0.0022986410438944403, | |
| "learning_rate": 3.1697627597970794e-10, | |
| "loss": 0.0, | |
| "step": 1855 | |
| }, | |
| { | |
| "epoch": 30.0, | |
| "grad_norm": 0.014875736397500602, | |
| "learning_rate": 8.804987003951937e-12, | |
| "loss": 0.0, | |
| "step": 1860 | |
| }, | |
| { | |
| "epoch": 30.0, | |
| "step": 1860, | |
| "total_flos": 35446477946880.0, | |
| "train_loss": 0.05082031709623338, | |
| "train_runtime": 30158.8638, | |
| "train_samples_per_second": 1.967, | |
| "train_steps_per_second": 0.062 | |
| } | |
| ], | |
| "logging_steps": 5, | |
| "max_steps": 1860, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 30, | |
| "save_steps": 310, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 35446477946880.0, | |
| "train_batch_size": 2, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |