{ "best_metric": null, "best_model_checkpoint": null, "epoch": 3.0, "eval_steps": 1000, "global_step": 186, "is_hyper_param_search": false, "is_local_process_zero": true, "is_world_process_zero": true, "log_history": [ { "epoch": 0.016129032258064516, "grad_norm": 25.0, "learning_rate": 4.973118279569893e-05, "loss": 50.0, "step": 1 }, { "epoch": 0.03225806451612903, "grad_norm": 23.625, "learning_rate": 4.9462365591397855e-05, "loss": 40.75, "step": 2 }, { "epoch": 0.04838709677419355, "grad_norm": 19.125, "learning_rate": 4.9193548387096775e-05, "loss": 30.25, "step": 3 }, { "epoch": 0.06451612903225806, "grad_norm": 7.8125, "learning_rate": 4.89247311827957e-05, "loss": 22.0, "step": 4 }, { "epoch": 0.08064516129032258, "grad_norm": 338.0, "learning_rate": 4.865591397849463e-05, "loss": 24.0, "step": 5 }, { "epoch": 0.0967741935483871, "grad_norm": 14.375, "learning_rate": 4.8387096774193554e-05, "loss": 22.125, "step": 6 }, { "epoch": 0.11290322580645161, "grad_norm": 14.5, "learning_rate": 4.811827956989248e-05, "loss": 19.125, "step": 7 }, { "epoch": 0.12903225806451613, "grad_norm": 8.0, "learning_rate": 4.78494623655914e-05, "loss": 18.625, "step": 8 }, { "epoch": 0.14516129032258066, "grad_norm": 9.6875, "learning_rate": 4.7580645161290326e-05, "loss": 18.5, "step": 9 }, { "epoch": 0.16129032258064516, "grad_norm": 8.375, "learning_rate": 4.731182795698925e-05, "loss": 17.125, "step": 10 }, { "epoch": 0.1774193548387097, "grad_norm": 6.71875, "learning_rate": 4.704301075268818e-05, "loss": 15.6875, "step": 11 }, { "epoch": 0.1935483870967742, "grad_norm": 10.375, "learning_rate": 4.67741935483871e-05, "loss": 15.625, "step": 12 }, { "epoch": 0.20967741935483872, "grad_norm": 13.6875, "learning_rate": 4.650537634408602e-05, "loss": 15.375, "step": 13 }, { "epoch": 0.22580645161290322, "grad_norm": 15.5, "learning_rate": 4.6236559139784944e-05, "loss": 11.8125, "step": 14 }, { "epoch": 0.24193548387096775, "grad_norm": 17.0, "learning_rate": 4.596774193548387e-05, "loss": 12.0625, "step": 15 }, { "epoch": 0.25806451612903225, "grad_norm": 15.625, "learning_rate": 4.56989247311828e-05, "loss": 11.0, "step": 16 }, { "epoch": 0.27419354838709675, "grad_norm": 18.5, "learning_rate": 4.543010752688172e-05, "loss": 11.0, "step": 17 }, { "epoch": 0.2903225806451613, "grad_norm": 15.25, "learning_rate": 4.516129032258064e-05, "loss": 8.6875, "step": 18 }, { "epoch": 0.3064516129032258, "grad_norm": 16.875, "learning_rate": 4.489247311827957e-05, "loss": 9.875, "step": 19 }, { "epoch": 0.3225806451612903, "grad_norm": 16.0, "learning_rate": 4.4623655913978496e-05, "loss": 10.25, "step": 20 }, { "epoch": 0.3387096774193548, "grad_norm": 16.625, "learning_rate": 4.435483870967742e-05, "loss": 9.1875, "step": 21 }, { "epoch": 0.3548387096774194, "grad_norm": 12.75, "learning_rate": 4.408602150537635e-05, "loss": 9.5625, "step": 22 }, { "epoch": 0.3709677419354839, "grad_norm": 14.75, "learning_rate": 4.381720430107527e-05, "loss": 9.125, "step": 23 }, { "epoch": 0.3870967741935484, "grad_norm": 13.375, "learning_rate": 4.3548387096774194e-05, "loss": 6.3438, "step": 24 }, { "epoch": 0.4032258064516129, "grad_norm": 12.6875, "learning_rate": 4.327956989247312e-05, "loss": 7.4062, "step": 25 }, { "epoch": 0.41935483870967744, "grad_norm": 13.25, "learning_rate": 4.301075268817205e-05, "loss": 6.6875, "step": 26 }, { "epoch": 0.43548387096774194, "grad_norm": 12.0, "learning_rate": 4.2741935483870973e-05, "loss": 5.8125, "step": 27 }, { "epoch": 0.45161290322580644, "grad_norm": 14.3125, 
"learning_rate": 4.247311827956989e-05, "loss": 6.4688, "step": 28 }, { "epoch": 0.46774193548387094, "grad_norm": 11.1875, "learning_rate": 4.220430107526882e-05, "loss": 5.5312, "step": 29 }, { "epoch": 0.4838709677419355, "grad_norm": 14.0, "learning_rate": 4.1935483870967746e-05, "loss": 6.0625, "step": 30 }, { "epoch": 0.5, "grad_norm": 11.875, "learning_rate": 4.166666666666667e-05, "loss": 4.5, "step": 31 }, { "epoch": 0.5161290322580645, "grad_norm": 11.0, "learning_rate": 4.13978494623656e-05, "loss": 6.75, "step": 32 }, { "epoch": 0.532258064516129, "grad_norm": 11.8125, "learning_rate": 4.112903225806452e-05, "loss": 6.625, "step": 33 }, { "epoch": 0.5483870967741935, "grad_norm": 11.5625, "learning_rate": 4.0860215053763444e-05, "loss": 4.2812, "step": 34 }, { "epoch": 0.5645161290322581, "grad_norm": 8.9375, "learning_rate": 4.0591397849462364e-05, "loss": 4.5938, "step": 35 }, { "epoch": 0.5806451612903226, "grad_norm": 10.75, "learning_rate": 4.032258064516129e-05, "loss": 6.125, "step": 36 }, { "epoch": 0.5967741935483871, "grad_norm": 9.5625, "learning_rate": 4.005376344086022e-05, "loss": 5.4062, "step": 37 }, { "epoch": 0.6129032258064516, "grad_norm": 9.4375, "learning_rate": 3.978494623655914e-05, "loss": 6.0, "step": 38 }, { "epoch": 0.6290322580645161, "grad_norm": 8.5625, "learning_rate": 3.951612903225806e-05, "loss": 3.7969, "step": 39 }, { "epoch": 0.6451612903225806, "grad_norm": 9.625, "learning_rate": 3.924731182795699e-05, "loss": 5.0, "step": 40 }, { "epoch": 0.6612903225806451, "grad_norm": 12.625, "learning_rate": 3.8978494623655915e-05, "loss": 7.0, "step": 41 }, { "epoch": 0.6774193548387096, "grad_norm": 8.5625, "learning_rate": 3.870967741935484e-05, "loss": 6.0938, "step": 42 }, { "epoch": 0.6935483870967742, "grad_norm": 11.625, "learning_rate": 3.844086021505376e-05, "loss": 6.0625, "step": 43 }, { "epoch": 0.7096774193548387, "grad_norm": 10.8125, "learning_rate": 3.817204301075269e-05, "loss": 4.2188, "step": 44 }, { "epoch": 0.7258064516129032, "grad_norm": 13.125, "learning_rate": 3.7903225806451614e-05, "loss": 5.6875, "step": 45 }, { "epoch": 0.7419354838709677, "grad_norm": 9.875, "learning_rate": 3.763440860215054e-05, "loss": 4.9688, "step": 46 }, { "epoch": 0.7580645161290323, "grad_norm": 8.75, "learning_rate": 3.736559139784947e-05, "loss": 4.0625, "step": 47 }, { "epoch": 0.7741935483870968, "grad_norm": 13.125, "learning_rate": 3.7096774193548386e-05, "loss": 6.2812, "step": 48 }, { "epoch": 0.7903225806451613, "grad_norm": 12.1875, "learning_rate": 3.682795698924731e-05, "loss": 5.125, "step": 49 }, { "epoch": 0.8064516129032258, "grad_norm": 9.5, "learning_rate": 3.655913978494624e-05, "loss": 4.5938, "step": 50 }, { "epoch": 0.8225806451612904, "grad_norm": 9.125, "learning_rate": 3.6290322580645165e-05, "loss": 3.4375, "step": 51 }, { "epoch": 0.8387096774193549, "grad_norm": 8.8125, "learning_rate": 3.602150537634409e-05, "loss": 5.1562, "step": 52 }, { "epoch": 0.8548387096774194, "grad_norm": 12.625, "learning_rate": 3.575268817204301e-05, "loss": 5.3125, "step": 53 }, { "epoch": 0.8709677419354839, "grad_norm": 10.8125, "learning_rate": 3.548387096774194e-05, "loss": 6.0312, "step": 54 }, { "epoch": 0.8870967741935484, "grad_norm": 10.0, "learning_rate": 3.5215053763440864e-05, "loss": 3.9844, "step": 55 }, { "epoch": 0.9032258064516129, "grad_norm": 7.15625, "learning_rate": 3.494623655913979e-05, "loss": 2.1562, "step": 56 }, { "epoch": 0.9193548387096774, "grad_norm": 9.1875, "learning_rate": 3.467741935483872e-05, "loss": 
3.8438, "step": 57 }, { "epoch": 0.9354838709677419, "grad_norm": 9.1875, "learning_rate": 3.4408602150537636e-05, "loss": 4.4062, "step": 58 }, { "epoch": 0.9516129032258065, "grad_norm": 8.6875, "learning_rate": 3.4139784946236556e-05, "loss": 3.7969, "step": 59 }, { "epoch": 0.967741935483871, "grad_norm": 7.84375, "learning_rate": 3.387096774193548e-05, "loss": 4.3125, "step": 60 }, { "epoch": 0.9838709677419355, "grad_norm": 8.375, "learning_rate": 3.360215053763441e-05, "loss": 3.2812, "step": 61 }, { "epoch": 1.0, "grad_norm": 9.5625, "learning_rate": 3.3333333333333335e-05, "loss": 4.5625, "step": 62 }, { "epoch": 1.0161290322580645, "grad_norm": 4.53125, "learning_rate": 3.306451612903226e-05, "loss": 1.6016, "step": 63 }, { "epoch": 1.032258064516129, "grad_norm": 5.5625, "learning_rate": 3.279569892473118e-05, "loss": 1.8984, "step": 64 }, { "epoch": 1.0483870967741935, "grad_norm": 5.625, "learning_rate": 3.252688172043011e-05, "loss": 1.3438, "step": 65 }, { "epoch": 1.064516129032258, "grad_norm": 6.03125, "learning_rate": 3.2258064516129034e-05, "loss": 1.7109, "step": 66 }, { "epoch": 1.0806451612903225, "grad_norm": 6.40625, "learning_rate": 3.198924731182796e-05, "loss": 2.4219, "step": 67 }, { "epoch": 1.096774193548387, "grad_norm": 7.84375, "learning_rate": 3.172043010752688e-05, "loss": 2.6094, "step": 68 }, { "epoch": 1.1129032258064515, "grad_norm": 4.90625, "learning_rate": 3.1451612903225806e-05, "loss": 1.2031, "step": 69 }, { "epoch": 1.129032258064516, "grad_norm": 7.34375, "learning_rate": 3.118279569892473e-05, "loss": 1.5703, "step": 70 }, { "epoch": 1.1451612903225807, "grad_norm": 9.1875, "learning_rate": 3.091397849462366e-05, "loss": 2.0469, "step": 71 }, { "epoch": 1.1612903225806452, "grad_norm": 6.46875, "learning_rate": 3.0645161290322585e-05, "loss": 0.9648, "step": 72 }, { "epoch": 1.1774193548387097, "grad_norm": 9.375, "learning_rate": 3.0376344086021508e-05, "loss": 2.2031, "step": 73 }, { "epoch": 1.1935483870967742, "grad_norm": 10.375, "learning_rate": 3.010752688172043e-05, "loss": 2.5938, "step": 74 }, { "epoch": 1.2096774193548387, "grad_norm": 4.625, "learning_rate": 2.9838709677419357e-05, "loss": 1.125, "step": 75 }, { "epoch": 1.2258064516129032, "grad_norm": 5.09375, "learning_rate": 2.9569892473118284e-05, "loss": 1.0625, "step": 76 }, { "epoch": 1.2419354838709677, "grad_norm": 6.71875, "learning_rate": 2.9301075268817207e-05, "loss": 1.8672, "step": 77 }, { "epoch": 1.2580645161290323, "grad_norm": 7.53125, "learning_rate": 2.9032258064516133e-05, "loss": 2.2656, "step": 78 }, { "epoch": 1.2741935483870968, "grad_norm": 6.5, "learning_rate": 2.8763440860215056e-05, "loss": 2.0625, "step": 79 }, { "epoch": 1.2903225806451613, "grad_norm": 3.5, "learning_rate": 2.8494623655913982e-05, "loss": 0.8086, "step": 80 }, { "epoch": 1.3064516129032258, "grad_norm": 4.5625, "learning_rate": 2.822580645161291e-05, "loss": 1.6797, "step": 81 }, { "epoch": 1.3225806451612903, "grad_norm": 5.0625, "learning_rate": 2.7956989247311828e-05, "loss": 1.5391, "step": 82 }, { "epoch": 1.3387096774193548, "grad_norm": 6.96875, "learning_rate": 2.768817204301075e-05, "loss": 1.8438, "step": 83 }, { "epoch": 1.3548387096774195, "grad_norm": 4.375, "learning_rate": 2.7419354838709678e-05, "loss": 1.2812, "step": 84 }, { "epoch": 1.370967741935484, "grad_norm": 5.8125, "learning_rate": 2.71505376344086e-05, "loss": 1.3359, "step": 85 }, { "epoch": 1.3870967741935485, "grad_norm": 4.6875, "learning_rate": 2.6881720430107527e-05, "loss": 0.9766, "step": 86 }, 
{ "epoch": 1.403225806451613, "grad_norm": 8.6875, "learning_rate": 2.661290322580645e-05, "loss": 2.4219, "step": 87 }, { "epoch": 1.4193548387096775, "grad_norm": 6.6875, "learning_rate": 2.6344086021505376e-05, "loss": 1.3672, "step": 88 }, { "epoch": 1.435483870967742, "grad_norm": 8.375, "learning_rate": 2.6075268817204303e-05, "loss": 1.8828, "step": 89 }, { "epoch": 1.4516129032258065, "grad_norm": 5.25, "learning_rate": 2.5806451612903226e-05, "loss": 1.3984, "step": 90 }, { "epoch": 1.467741935483871, "grad_norm": 4.625, "learning_rate": 2.5537634408602152e-05, "loss": 0.8516, "step": 91 }, { "epoch": 1.4838709677419355, "grad_norm": 7.8125, "learning_rate": 2.5268817204301075e-05, "loss": 2.4844, "step": 92 }, { "epoch": 1.5, "grad_norm": 5.25, "learning_rate": 2.5e-05, "loss": 0.8594, "step": 93 }, { "epoch": 1.5161290322580645, "grad_norm": 5.75, "learning_rate": 2.4731182795698928e-05, "loss": 1.5078, "step": 94 }, { "epoch": 1.532258064516129, "grad_norm": 5.625, "learning_rate": 2.446236559139785e-05, "loss": 1.625, "step": 95 }, { "epoch": 1.5483870967741935, "grad_norm": 5.0, "learning_rate": 2.4193548387096777e-05, "loss": 1.1016, "step": 96 }, { "epoch": 1.564516129032258, "grad_norm": 4.5, "learning_rate": 2.39247311827957e-05, "loss": 2.0, "step": 97 }, { "epoch": 1.5806451612903225, "grad_norm": 5.53125, "learning_rate": 2.3655913978494626e-05, "loss": 1.4766, "step": 98 }, { "epoch": 1.596774193548387, "grad_norm": 4.15625, "learning_rate": 2.338709677419355e-05, "loss": 0.7227, "step": 99 }, { "epoch": 1.6129032258064515, "grad_norm": 5.15625, "learning_rate": 2.3118279569892472e-05, "loss": 1.0781, "step": 100 }, { "epoch": 1.629032258064516, "grad_norm": 2.96875, "learning_rate": 2.28494623655914e-05, "loss": 0.5977, "step": 101 }, { "epoch": 1.6451612903225805, "grad_norm": 7.78125, "learning_rate": 2.258064516129032e-05, "loss": 2.1562, "step": 102 }, { "epoch": 1.661290322580645, "grad_norm": 5.0625, "learning_rate": 2.2311827956989248e-05, "loss": 1.1172, "step": 103 }, { "epoch": 1.6774193548387095, "grad_norm": 4.4375, "learning_rate": 2.2043010752688174e-05, "loss": 0.5586, "step": 104 }, { "epoch": 1.6935483870967742, "grad_norm": 6.21875, "learning_rate": 2.1774193548387097e-05, "loss": 1.5469, "step": 105 }, { "epoch": 1.7096774193548387, "grad_norm": 6.40625, "learning_rate": 2.1505376344086024e-05, "loss": 1.2891, "step": 106 }, { "epoch": 1.7258064516129032, "grad_norm": 6.0625, "learning_rate": 2.1236559139784946e-05, "loss": 2.6719, "step": 107 }, { "epoch": 1.7419354838709677, "grad_norm": 10.5625, "learning_rate": 2.0967741935483873e-05, "loss": 2.2031, "step": 108 }, { "epoch": 1.7580645161290323, "grad_norm": 10.0625, "learning_rate": 2.06989247311828e-05, "loss": 2.1094, "step": 109 }, { "epoch": 1.7741935483870968, "grad_norm": 7.53125, "learning_rate": 2.0430107526881722e-05, "loss": 2.4375, "step": 110 }, { "epoch": 1.7903225806451613, "grad_norm": 6.5625, "learning_rate": 2.0161290322580645e-05, "loss": 1.4922, "step": 111 }, { "epoch": 1.8064516129032258, "grad_norm": 9.3125, "learning_rate": 1.989247311827957e-05, "loss": 2.6406, "step": 112 }, { "epoch": 1.8225806451612905, "grad_norm": 5.625, "learning_rate": 1.9623655913978494e-05, "loss": 0.8398, "step": 113 }, { "epoch": 1.838709677419355, "grad_norm": 2.828125, "learning_rate": 1.935483870967742e-05, "loss": 0.4219, "step": 114 }, { "epoch": 1.8548387096774195, "grad_norm": 6.03125, "learning_rate": 1.9086021505376344e-05, "loss": 0.9766, "step": 115 }, { "epoch": 1.870967741935484, 
"grad_norm": 4.59375, "learning_rate": 1.881720430107527e-05, "loss": 0.7578, "step": 116 }, { "epoch": 1.8870967741935485, "grad_norm": 1.40625, "learning_rate": 1.8548387096774193e-05, "loss": 0.2539, "step": 117 }, { "epoch": 1.903225806451613, "grad_norm": 4.40625, "learning_rate": 1.827956989247312e-05, "loss": 0.9102, "step": 118 }, { "epoch": 1.9193548387096775, "grad_norm": 6.03125, "learning_rate": 1.8010752688172046e-05, "loss": 1.5469, "step": 119 }, { "epoch": 1.935483870967742, "grad_norm": 8.125, "learning_rate": 1.774193548387097e-05, "loss": 1.6172, "step": 120 }, { "epoch": 1.9516129032258065, "grad_norm": 4.53125, "learning_rate": 1.7473118279569895e-05, "loss": 0.5195, "step": 121 }, { "epoch": 1.967741935483871, "grad_norm": 6.53125, "learning_rate": 1.7204301075268818e-05, "loss": 1.1797, "step": 122 }, { "epoch": 1.9838709677419355, "grad_norm": 5.84375, "learning_rate": 1.693548387096774e-05, "loss": 1.3125, "step": 123 }, { "epoch": 2.0, "grad_norm": 2.09375, "learning_rate": 1.6666666666666667e-05, "loss": 0.5352, "step": 124 }, { "epoch": 2.0161290322580645, "grad_norm": 2.546875, "learning_rate": 1.639784946236559e-05, "loss": 0.3926, "step": 125 }, { "epoch": 2.032258064516129, "grad_norm": 3.90625, "learning_rate": 1.6129032258064517e-05, "loss": 0.2715, "step": 126 }, { "epoch": 2.0483870967741935, "grad_norm": 3.375, "learning_rate": 1.586021505376344e-05, "loss": 0.7422, "step": 127 }, { "epoch": 2.064516129032258, "grad_norm": 2.453125, "learning_rate": 1.5591397849462366e-05, "loss": 0.332, "step": 128 }, { "epoch": 2.0806451612903225, "grad_norm": 1.1484375, "learning_rate": 1.5322580645161292e-05, "loss": 0.1719, "step": 129 }, { "epoch": 2.096774193548387, "grad_norm": 1.0390625, "learning_rate": 1.5053763440860215e-05, "loss": 0.1475, "step": 130 }, { "epoch": 2.1129032258064515, "grad_norm": 2.625, "learning_rate": 1.4784946236559142e-05, "loss": 0.4004, "step": 131 }, { "epoch": 2.129032258064516, "grad_norm": 4.34375, "learning_rate": 1.4516129032258066e-05, "loss": 0.2656, "step": 132 }, { "epoch": 2.1451612903225805, "grad_norm": 0.984375, "learning_rate": 1.4247311827956991e-05, "loss": 0.1357, "step": 133 }, { "epoch": 2.161290322580645, "grad_norm": 1.40625, "learning_rate": 1.3978494623655914e-05, "loss": 0.1582, "step": 134 }, { "epoch": 2.1774193548387095, "grad_norm": 2.625, "learning_rate": 1.3709677419354839e-05, "loss": 0.4531, "step": 135 }, { "epoch": 2.193548387096774, "grad_norm": 4.3125, "learning_rate": 1.3440860215053763e-05, "loss": 0.3574, "step": 136 }, { "epoch": 2.2096774193548385, "grad_norm": 1.1015625, "learning_rate": 1.3172043010752688e-05, "loss": 0.1387, "step": 137 }, { "epoch": 2.225806451612903, "grad_norm": 0.921875, "learning_rate": 1.2903225806451613e-05, "loss": 0.2852, "step": 138 }, { "epoch": 2.241935483870968, "grad_norm": 1.0859375, "learning_rate": 1.2634408602150537e-05, "loss": 0.1357, "step": 139 }, { "epoch": 2.258064516129032, "grad_norm": 0.75, "learning_rate": 1.2365591397849464e-05, "loss": 0.0781, "step": 140 }, { "epoch": 2.274193548387097, "grad_norm": 4.90625, "learning_rate": 1.2096774193548388e-05, "loss": 1.4297, "step": 141 }, { "epoch": 2.2903225806451615, "grad_norm": 1.015625, "learning_rate": 1.1827956989247313e-05, "loss": 0.1338, "step": 142 }, { "epoch": 2.306451612903226, "grad_norm": 0.67578125, "learning_rate": 1.1559139784946236e-05, "loss": 0.0981, "step": 143 }, { "epoch": 2.3225806451612905, "grad_norm": 2.703125, "learning_rate": 1.129032258064516e-05, "loss": 0.4043, "step": 
144 }, { "epoch": 2.338709677419355, "grad_norm": 1.953125, "learning_rate": 1.1021505376344087e-05, "loss": 0.1592, "step": 145 }, { "epoch": 2.3548387096774195, "grad_norm": 2.875, "learning_rate": 1.0752688172043012e-05, "loss": 0.5117, "step": 146 }, { "epoch": 2.370967741935484, "grad_norm": 1.0703125, "learning_rate": 1.0483870967741936e-05, "loss": 0.1611, "step": 147 }, { "epoch": 2.3870967741935485, "grad_norm": 0.74609375, "learning_rate": 1.0215053763440861e-05, "loss": 0.1182, "step": 148 }, { "epoch": 2.403225806451613, "grad_norm": 1.2265625, "learning_rate": 9.946236559139786e-06, "loss": 0.1445, "step": 149 }, { "epoch": 2.4193548387096775, "grad_norm": 1.484375, "learning_rate": 9.67741935483871e-06, "loss": 0.1816, "step": 150 }, { "epoch": 2.435483870967742, "grad_norm": 0.56640625, "learning_rate": 9.408602150537635e-06, "loss": 0.0811, "step": 151 }, { "epoch": 2.4516129032258065, "grad_norm": 1.625, "learning_rate": 9.13978494623656e-06, "loss": 0.5469, "step": 152 }, { "epoch": 2.467741935483871, "grad_norm": 2.109375, "learning_rate": 8.870967741935484e-06, "loss": 0.2285, "step": 153 }, { "epoch": 2.4838709677419355, "grad_norm": 2.96875, "learning_rate": 8.602150537634409e-06, "loss": 0.3691, "step": 154 }, { "epoch": 2.5, "grad_norm": 1.0703125, "learning_rate": 8.333333333333334e-06, "loss": 0.0923, "step": 155 }, { "epoch": 2.5161290322580645, "grad_norm": 4.09375, "learning_rate": 8.064516129032258e-06, "loss": 0.3828, "step": 156 }, { "epoch": 2.532258064516129, "grad_norm": 1.8515625, "learning_rate": 7.795698924731183e-06, "loss": 0.2441, "step": 157 }, { "epoch": 2.5483870967741935, "grad_norm": 2.921875, "learning_rate": 7.526881720430108e-06, "loss": 0.5, "step": 158 }, { "epoch": 2.564516129032258, "grad_norm": 1.171875, "learning_rate": 7.258064516129033e-06, "loss": 0.1611, "step": 159 }, { "epoch": 2.5806451612903225, "grad_norm": 1.53125, "learning_rate": 6.989247311827957e-06, "loss": 0.1914, "step": 160 }, { "epoch": 2.596774193548387, "grad_norm": 2.421875, "learning_rate": 6.720430107526882e-06, "loss": 0.2041, "step": 161 }, { "epoch": 2.6129032258064515, "grad_norm": 2.015625, "learning_rate": 6.451612903225806e-06, "loss": 0.2793, "step": 162 }, { "epoch": 2.629032258064516, "grad_norm": 1.875, "learning_rate": 6.182795698924732e-06, "loss": 0.4238, "step": 163 }, { "epoch": 2.6451612903225805, "grad_norm": 1.7578125, "learning_rate": 5.9139784946236566e-06, "loss": 0.3535, "step": 164 }, { "epoch": 2.661290322580645, "grad_norm": 3.984375, "learning_rate": 5.64516129032258e-06, "loss": 0.2988, "step": 165 }, { "epoch": 2.6774193548387095, "grad_norm": 3.90625, "learning_rate": 5.376344086021506e-06, "loss": 0.6875, "step": 166 }, { "epoch": 2.693548387096774, "grad_norm": 2.375, "learning_rate": 5.1075268817204305e-06, "loss": 0.2949, "step": 167 }, { "epoch": 2.709677419354839, "grad_norm": 0.640625, "learning_rate": 4.838709677419355e-06, "loss": 0.084, "step": 168 }, { "epoch": 2.725806451612903, "grad_norm": 3.25, "learning_rate": 4.56989247311828e-06, "loss": 0.2334, "step": 169 }, { "epoch": 2.741935483870968, "grad_norm": 2.265625, "learning_rate": 4.3010752688172045e-06, "loss": 0.2158, "step": 170 }, { "epoch": 2.758064516129032, "grad_norm": 2.3125, "learning_rate": 4.032258064516129e-06, "loss": 0.2471, "step": 171 }, { "epoch": 2.774193548387097, "grad_norm": 1.2734375, "learning_rate": 3.763440860215054e-06, "loss": 0.1377, "step": 172 }, { "epoch": 2.790322580645161, "grad_norm": 1.3828125, "learning_rate": 
3.4946236559139785e-06, "loss": 0.1885, "step": 173 }, { "epoch": 2.806451612903226, "grad_norm": 2.28125, "learning_rate": 3.225806451612903e-06, "loss": 0.209, "step": 174 }, { "epoch": 2.8225806451612905, "grad_norm": 1.90625, "learning_rate": 2.9569892473118283e-06, "loss": 0.3027, "step": 175 }, { "epoch": 2.838709677419355, "grad_norm": 2.96875, "learning_rate": 2.688172043010753e-06, "loss": 0.2295, "step": 176 }, { "epoch": 2.8548387096774195, "grad_norm": 4.03125, "learning_rate": 2.4193548387096776e-06, "loss": 0.5352, "step": 177 }, { "epoch": 2.870967741935484, "grad_norm": 1.2421875, "learning_rate": 2.1505376344086023e-06, "loss": 0.1216, "step": 178 }, { "epoch": 2.8870967741935485, "grad_norm": 2.28125, "learning_rate": 1.881720430107527e-06, "loss": 0.3145, "step": 179 }, { "epoch": 2.903225806451613, "grad_norm": 4.625, "learning_rate": 1.6129032258064516e-06, "loss": 0.8047, "step": 180 }, { "epoch": 2.9193548387096775, "grad_norm": 1.0, "learning_rate": 1.3440860215053765e-06, "loss": 0.1157, "step": 181 }, { "epoch": 2.935483870967742, "grad_norm": 3.3125, "learning_rate": 1.0752688172043011e-06, "loss": 0.5586, "step": 182 }, { "epoch": 2.9516129032258065, "grad_norm": 1.5078125, "learning_rate": 8.064516129032258e-07, "loss": 0.1484, "step": 183 }, { "epoch": 2.967741935483871, "grad_norm": 0.91796875, "learning_rate": 5.376344086021506e-07, "loss": 0.0986, "step": 184 }, { "epoch": 2.9838709677419355, "grad_norm": 2.78125, "learning_rate": 2.688172043010753e-07, "loss": 0.4648, "step": 185 }, { "epoch": 3.0, "grad_norm": 1.8125, "learning_rate": 0.0, "loss": 0.2148, "step": 186 } ], "logging_steps": 1, "max_steps": 186, "num_input_tokens_seen": 0, "num_train_epochs": 3, "save_steps": 1000, "stateful_callbacks": { "TrainerControl": { "args": { "should_epoch_stop": false, "should_evaluate": false, "should_log": false, "should_save": true, "should_training_stop": true }, "attributes": {} } }, "total_flos": 0.0, "train_batch_size": 4, "trial_name": null, "trial_params": null }
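
The object above is the standard trainer state the Hugging Face Trainer writes alongside a checkpoint; its log_history holds one record per optimizer step (186 steps, 3 epochs, linear learning-rate decay from ~5e-5 to 0, loss falling from 50.0 to ~0.21). The following is a minimal, illustrative Python sketch for inspecting that history, not part of the state file itself; it assumes the JSON is saved as "trainer_state.json" in the working directory and that matplotlib is available.

    # Sketch: load the trainer state and summarize/plot the training curve.
    # Assumes the JSON above is stored as "trainer_state.json" (hypothetical path).
    import json
    import matplotlib.pyplot as plt

    with open("trainer_state.json") as f:
        state = json.load(f)

    # Each log_history entry carries epoch, grad_norm, learning_rate, loss, step.
    steps = [entry["step"] for entry in state["log_history"]]
    losses = [entry["loss"] for entry in state["log_history"]]
    lrs = [entry["learning_rate"] for entry in state["log_history"]]

    print(f"{state['global_step']} steps over {state['num_train_epochs']} epochs; "
          f"first loss {losses[0]}, final loss {losses[-1]}")

    # Plot loss and learning rate against the optimizer step.
    fig, (ax_loss, ax_lr) = plt.subplots(2, 1, sharex=True)
    ax_loss.plot(steps, losses)
    ax_loss.set_ylabel("training loss")
    ax_lr.plot(steps, lrs)
    ax_lr.set_ylabel("learning rate")
    ax_lr.set_xlabel("step")
    fig.savefig("training_curve.png")

Note that this state records no evaluation results ("best_metric" and "best_model_checkpoint" are null, and eval_steps/save_steps of 1000 exceed max_steps of 186), so only the training loss and learning-rate schedule can be recovered from it.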