{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.0,
  "global_step": 297,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01,
      "learning_rate": 1.6666666666666667e-06,
      "loss": 0.6895,
      "step": 1
    },
    {
      "epoch": 0.02,
      "learning_rate": 3.3333333333333333e-06,
      "loss": 0.7129,
      "step": 2
    },
    {
      "epoch": 0.03,
      "learning_rate": 5e-06,
      "loss": 0.7467,
      "step": 3
    },
    {
      "epoch": 0.04,
      "learning_rate": 6.666666666666667e-06,
      "loss": 0.6553,
      "step": 4
    },
    {
      "epoch": 0.05,
      "learning_rate": 8.333333333333334e-06,
      "loss": 0.6908,
      "step": 5
    },
    {
      "epoch": 0.06,
      "learning_rate": 1e-05,
      "loss": 0.6292,
      "step": 6
    },
    {
      "epoch": 0.07,
      "learning_rate": 1.1666666666666668e-05,
      "loss": 0.648,
      "step": 7
    },
    {
      "epoch": 0.08,
      "learning_rate": 1.3333333333333333e-05,
      "loss": 0.6291,
      "step": 8
    },
    {
      "epoch": 0.09,
      "learning_rate": 1.5000000000000002e-05,
      "loss": 0.6205,
      "step": 9
    },
    {
      "epoch": 0.1,
      "learning_rate": 1.6666666666666667e-05,
      "loss": 0.5143,
      "step": 10
    },
    {
      "epoch": 0.11,
      "learning_rate": 1.8333333333333333e-05,
      "loss": 0.5343,
      "step": 11
    },
    {
      "epoch": 0.12,
      "learning_rate": 2e-05,
      "loss": 0.6127,
      "step": 12
    },
    {
      "epoch": 0.13,
      "learning_rate": 1.9999392458943432e-05,
      "loss": 0.5313,
      "step": 13
    },
    {
      "epoch": 0.14,
      "learning_rate": 1.9997569909594948e-05,
      "loss": 0.486,
      "step": 14
    },
    {
      "epoch": 0.15,
      "learning_rate": 1.999453257340926e-05,
      "loss": 0.5457,
      "step": 15
    },
    {
      "epoch": 0.16,
      "learning_rate": 1.9990280819447662e-05,
      "loss": 0.5688,
      "step": 16
    },
    {
      "epoch": 0.17,
      "learning_rate": 1.9984815164333163e-05,
      "loss": 0.5523,
      "step": 17
    },
    {
      "epoch": 0.18,
      "learning_rate": 1.9978136272187745e-05,
      "loss": 0.5291,
      "step": 18
    },
    {
      "epoch": 0.19,
      "learning_rate": 1.9970244954551648e-05,
      "loss": 0.4995,
      "step": 19
    },
    {
      "epoch": 0.2,
      "learning_rate": 1.9961142170284762e-05,
      "loss": 0.4819,
      "step": 20
    },
    {
      "epoch": 0.21,
      "learning_rate": 1.9950829025450116e-05,
      "loss": 0.485,
      "step": 21
    },
    {
      "epoch": 0.22,
      "learning_rate": 1.9939306773179498e-05,
      "loss": 0.5592,
      "step": 22
    },
    {
      "epoch": 0.23,
      "learning_rate": 1.9926576813521167e-05,
      "loss": 0.5018,
      "step": 23
    },
    {
      "epoch": 0.24,
      "learning_rate": 1.9912640693269754e-05,
      "loss": 0.5148,
      "step": 24
    },
    {
      "epoch": 0.25,
      "learning_rate": 1.98975001057783e-05,
      "loss": 0.4703,
      "step": 25
    },
    {
      "epoch": 0.26,
      "learning_rate": 1.9881156890752517e-05,
      "loss": 0.46,
      "step": 26
    },
    {
      "epoch": 0.27,
      "learning_rate": 1.9863613034027224e-05,
      "loss": 0.4772,
      "step": 27
    },
    {
      "epoch": 0.28,
      "learning_rate": 1.9844870667325073e-05,
      "loss": 0.4644,
      "step": 28
    },
    {
      "epoch": 0.29,
      "learning_rate": 1.9824932067997516e-05,
      "loss": 0.43,
      "step": 29
    },
    {
      "epoch": 0.3,
      "learning_rate": 1.9803799658748096e-05,
      "loss": 0.4792,
      "step": 30
    },
    {
      "epoch": 0.31,
      "learning_rate": 1.9781476007338058e-05,
      "loss": 0.4879,
      "step": 31
    },
    {
      "epoch": 0.32,
      "learning_rate": 1.9757963826274357e-05,
      "loss": 0.466,
      "step": 32
    },
    {
      "epoch": 0.33,
      "learning_rate": 1.973326597248006e-05,
      "loss": 0.4972,
      "step": 33
    },
    {
      "epoch": 0.34,
      "learning_rate": 1.97073854469472e-05,
      "loss": 0.4756,
      "step": 34
    },
    {
      "epoch": 0.35,
      "learning_rate": 1.968032539437215e-05,
      "loss": 0.5015,
      "step": 35
    },
    {
      "epoch": 0.36,
      "learning_rate": 1.9652089102773487e-05,
      "loss": 0.4449,
      "step": 36
    },
    {
      "epoch": 0.37,
      "learning_rate": 1.9622680003092503e-05,
      "loss": 0.411,
      "step": 37
    },
    {
      "epoch": 0.38,
      "learning_rate": 1.95921016687763e-05,
      "loss": 0.4184,
      "step": 38
    },
    {
      "epoch": 0.39,
      "learning_rate": 1.9560357815343577e-05,
      "loss": 0.4779,
      "step": 39
    },
    {
      "epoch": 0.4,
      "learning_rate": 1.9527452299933192e-05,
      "loss": 0.4778,
      "step": 40
    },
    {
      "epoch": 0.41,
      "learning_rate": 1.9493389120835462e-05,
      "loss": 0.3891,
      "step": 41
    },
    {
      "epoch": 0.42,
      "learning_rate": 1.9458172417006347e-05,
      "loss": 0.4235,
      "step": 42
    },
    {
      "epoch": 0.43,
      "learning_rate": 1.9421806467564546e-05,
      "loss": 0.4597,
      "step": 43
    },
    {
      "epoch": 0.44,
      "learning_rate": 1.9384295691271523e-05,
      "loss": 0.4506,
      "step": 44
    },
    {
      "epoch": 0.45,
      "learning_rate": 1.934564464599461e-05,
      "loss": 0.4283,
      "step": 45
    },
    {
      "epoch": 0.46,
      "learning_rate": 1.9305858028153186e-05,
      "loss": 0.414,
      "step": 46
    },
    {
      "epoch": 0.47,
      "learning_rate": 1.9264940672148018e-05,
      "loss": 0.3809,
      "step": 47
    },
    {
      "epoch": 0.48,
      "learning_rate": 1.922289754977385e-05,
      "loss": 0.4041,
      "step": 48
    },
    {
      "epoch": 0.49,
      "learning_rate": 1.9179733769615273e-05,
      "loss": 0.4192,
      "step": 49
    },
    {
      "epoch": 0.51,
      "learning_rate": 1.913545457642601e-05,
      "loss": 0.4638,
      "step": 50
    },
    {
      "epoch": 0.52,
      "learning_rate": 1.909006535049163e-05,
      "loss": 0.3532,
      "step": 51
    },
    {
      "epoch": 0.53,
      "learning_rate": 1.9043571606975776e-05,
      "loss": 0.3824,
      "step": 52
    },
    {
      "epoch": 0.54,
      "learning_rate": 1.899597899525007e-05,
      "loss": 0.4066,
      "step": 53
    },
    {
      "epoch": 0.55,
      "learning_rate": 1.8947293298207637e-05,
      "loss": 0.4117,
      "step": 54
    },
    {
      "epoch": 0.56,
      "learning_rate": 1.8897520431560435e-05,
      "loss": 0.4037,
      "step": 55
    },
    {
      "epoch": 0.57,
      "learning_rate": 1.884666644312046e-05,
      "loss": 0.4009,
      "step": 56
    },
    {
      "epoch": 0.58,
      "learning_rate": 1.879473751206489e-05,
      "loss": 0.3139,
      "step": 57
    },
    {
      "epoch": 0.59,
      "learning_rate": 1.8741739948185256e-05,
      "loss": 0.4352,
      "step": 58
    },
    {
      "epoch": 0.6,
      "learning_rate": 1.8687680191120746e-05,
      "loss": 0.4189,
      "step": 59
    },
    {
      "epoch": 0.61,
      "learning_rate": 1.863256480957574e-05,
      "loss": 0.4013,
      "step": 60
    },
    {
      "epoch": 0.62,
      "learning_rate": 1.8576400500521673e-05,
      "loss": 0.416,
      "step": 61
    },
    {
      "epoch": 0.63,
      "learning_rate": 1.851919408838327e-05,
      "loss": 0.4034,
      "step": 62
    },
    {
      "epoch": 0.64,
      "learning_rate": 1.8460952524209355e-05,
      "loss": 0.3081,
      "step": 63
    },
    {
      "epoch": 0.65,
      "learning_rate": 1.8401682884828212e-05,
      "loss": 0.4454,
      "step": 64
    },
    {
      "epoch": 0.66,
      "learning_rate": 1.83413923719877e-05,
      "loss": 0.4245,
      "step": 65
    },
    {
      "epoch": 0.67,
      "learning_rate": 1.8280088311480203e-05,
      "loss": 0.4159,
      "step": 66
    },
    {
      "epoch": 0.68,
      "learning_rate": 1.821777815225245e-05,
      "loss": 0.3682,
      "step": 67
    },
    {
      "epoch": 0.69,
      "learning_rate": 1.8154469465500447e-05,
      "loss": 0.3361,
      "step": 68
    },
    {
      "epoch": 0.7,
      "learning_rate": 1.8090169943749477e-05,
      "loss": 0.4249,
      "step": 69
    },
    {
      "epoch": 0.71,
      "learning_rate": 1.802488739991941e-05,
      "loss": 0.3483,
      "step": 70
    },
    {
      "epoch": 0.72,
      "learning_rate": 1.7958629766375387e-05,
      "loss": 0.3563,
      "step": 71
    },
    {
      "epoch": 0.73,
      "learning_rate": 1.789140509396394e-05,
      "loss": 0.3829,
      "step": 72
    },
    {
      "epoch": 0.74,
      "learning_rate": 1.7823221551034766e-05,
      "loss": 0.333,
      "step": 73
    },
    {
      "epoch": 0.75,
      "learning_rate": 1.7754087422448217e-05,
      "loss": 0.3394,
      "step": 74
    },
    {
      "epoch": 0.76,
      "learning_rate": 1.7684011108568593e-05,
      "loss": 0.2989,
      "step": 75
    },
    {
      "epoch": 0.77,
      "learning_rate": 1.7613001124243448e-05,
      "loss": 0.3513,
      "step": 76
    },
    {
      "epoch": 0.78,
      "learning_rate": 1.7541066097768965e-05,
      "loss": 0.4423,
      "step": 77
    },
    {
      "epoch": 0.79,
      "learning_rate": 1.7468214769841542e-05,
      "loss": 0.3345,
      "step": 78
    },
    {
      "epoch": 0.8,
      "learning_rate": 1.7394455992495722e-05,
      "loss": 0.3538,
      "step": 79
    },
    {
      "epoch": 0.81,
      "learning_rate": 1.7319798728028617e-05,
      "loss": 0.311,
      "step": 80
    },
    {
      "epoch": 0.82,
      "learning_rate": 1.7244252047910893e-05,
      "loss": 0.3137,
      "step": 81
    },
    {
      "epoch": 0.83,
      "learning_rate": 1.7167825131684516e-05,
      "loss": 0.2986,
      "step": 82
    },
    {
      "epoch": 0.84,
      "learning_rate": 1.7090527265847375e-05,
      "loss": 0.3376,
      "step": 83
    },
    {
      "epoch": 0.85,
      "learning_rate": 1.7012367842724887e-05,
      "loss": 0.2555,
      "step": 84
    },
    {
      "epoch": 0.86,
      "learning_rate": 1.6933356359328756e-05,
      "loss": 0.2784,
      "step": 85
    },
    {
      "epoch": 0.87,
      "learning_rate": 1.6853502416203e-05,
      "loss": 0.4175,
      "step": 86
    },
    {
      "epoch": 0.88,
      "learning_rate": 1.6772815716257414e-05,
      "loss": 0.402,
      "step": 87
    },
    {
      "epoch": 0.89,
      "learning_rate": 1.6691306063588583e-05,
      "loss": 0.3356,
      "step": 88
    },
    {
      "epoch": 0.9,
      "learning_rate": 1.6608983362288612e-05,
      "loss": 0.3317,
      "step": 89
    },
    {
      "epoch": 0.91,
      "learning_rate": 1.6525857615241686e-05,
      "loss": 0.3149,
      "step": 90
    },
    {
      "epoch": 0.92,
      "learning_rate": 1.6441938922908644e-05,
      "loss": 0.291,
      "step": 91
    },
    {
      "epoch": 0.93,
      "learning_rate": 1.6357237482099682e-05,
      "loss": 0.3862,
      "step": 92
    },
    {
      "epoch": 0.94,
      "learning_rate": 1.6271763584735373e-05,
      "loss": 0.3611,
      "step": 93
    },
    {
      "epoch": 0.95,
      "learning_rate": 1.6185527616596096e-05,
      "loss": 0.3326,
      "step": 94
    },
    {
      "epoch": 0.96,
      "learning_rate": 1.609854005606009e-05,
      "loss": 0.4092,
      "step": 95
    },
    {
      "epoch": 0.97,
      "learning_rate": 1.6010811472830253e-05,
      "loss": 0.313,
      "step": 96
    },
    {
      "epoch": 0.98,
      "learning_rate": 1.5922352526649803e-05,
      "loss": 0.3875,
      "step": 97
    },
    {
      "epoch": 0.99,
      "learning_rate": 1.583317396600707e-05,
      "loss": 0.3525,
      "step": 98
    },
    {
      "epoch": 1.0,
      "learning_rate": 1.5743286626829437e-05,
      "loss": 0.3531,
      "step": 99
    },
    {
      "epoch": 1.01,
      "learning_rate": 1.565270143116672e-05,
      "loss": 0.3013,
      "step": 100
    },
    {
      "epoch": 1.02,
      "learning_rate": 1.5561429385864005e-05,
      "loss": 0.3702,
      "step": 101
    },
    {
      "epoch": 1.03,
      "learning_rate": 1.5469481581224274e-05,
      "loss": 0.3134,
      "step": 102
    },
    {
      "epoch": 1.04,
      "learning_rate": 1.5376869189660784e-05,
      "loss": 0.334,
      "step": 103
    },
    {
      "epoch": 1.05,
      "learning_rate": 1.528360346433959e-05,
      "loss": 0.3265,
      "step": 104
    },
    {
      "epoch": 1.06,
      "learning_rate": 1.5189695737812153e-05,
      "loss": 0.2694,
      "step": 105
    },
    {
      "epoch": 1.07,
      "learning_rate": 1.5095157420638349e-05,
      "loss": 0.3163,
      "step": 106
    },
    {
      "epoch": 1.08,
      "learning_rate": 1.5000000000000002e-05,
      "loss": 0.273,
      "step": 107
    },
    {
      "epoch": 1.09,
      "learning_rate": 1.4904235038305084e-05,
      "loss": 0.2774,
      "step": 108
    },
    {
      "epoch": 1.1,
      "learning_rate": 1.4807874171782795e-05,
      "loss": 0.3119,
      "step": 109
    },
    {
      "epoch": 1.11,
      "learning_rate": 1.4710929109069674e-05,
      "loss": 0.3072,
      "step": 110
    },
    {
      "epoch": 1.12,
      "learning_rate": 1.461341162978688e-05,
      "loss": 0.2993,
      "step": 111
    },
    {
      "epoch": 1.13,
      "learning_rate": 1.4515333583108896e-05,
      "loss": 0.2969,
      "step": 112
    },
    {
      "epoch": 1.14,
      "learning_rate": 1.4416706886323741e-05,
      "loss": 0.2728,
      "step": 113
    },
    {
      "epoch": 1.15,
      "learning_rate": 1.4317543523384928e-05,
      "loss": 0.282,
      "step": 114
    },
    {
      "epoch": 1.16,
      "learning_rate": 1.4217855543455323e-05,
      "loss": 0.2412,
      "step": 115
    },
    {
      "epoch": 1.17,
      "learning_rate": 1.4117655059443052e-05,
      "loss": 0.2805,
      "step": 116
    },
    {
      "epoch": 1.18,
      "learning_rate": 1.4016954246529697e-05,
      "loss": 0.2515,
      "step": 117
    },
    {
      "epoch": 1.19,
      "learning_rate": 1.3915765340690916e-05,
      "loss": 0.246,
      "step": 118
    },
    {
      "epoch": 1.2,
      "learning_rate": 1.3814100637209663e-05,
      "loss": 0.272,
      "step": 119
    },
    {
      "epoch": 1.21,
      "learning_rate": 1.3711972489182208e-05,
      "loss": 0.2769,
      "step": 120
    },
    {
      "epoch": 1.22,
      "learning_rate": 1.3609393306017149e-05,
      "loss": 0.2189,
      "step": 121
    },
    {
      "epoch": 1.23,
      "learning_rate": 1.3506375551927546e-05,
      "loss": 0.2174,
      "step": 122
    },
    {
      "epoch": 1.24,
      "learning_rate": 1.3402931744416432e-05,
      "loss": 0.2252,
      "step": 123
    },
    {
      "epoch": 1.25,
      "learning_rate": 1.3299074452755829e-05,
      "loss": 0.2361,
      "step": 124
    },
    {
      "epoch": 1.26,
      "learning_rate": 1.3194816296459483e-05,
      "loss": 0.2545,
      "step": 125
    },
    {
      "epoch": 1.27,
      "learning_rate": 1.3090169943749475e-05,
      "loss": 0.2656,
      "step": 126
    },
    {
      "epoch": 1.28,
      "learning_rate": 1.2985148110016947e-05,
      "loss": 0.2973,
      "step": 127
    },
    {
      "epoch": 1.29,
      "learning_rate": 1.2879763556277062e-05,
      "loss": 0.2965,
      "step": 128
    },
    {
      "epoch": 1.3,
      "learning_rate": 1.2774029087618448e-05,
      "loss": 0.2444,
      "step": 129
    },
    {
      "epoch": 1.31,
      "learning_rate": 1.2667957551647263e-05,
      "loss": 0.2817,
      "step": 130
    },
    {
      "epoch": 1.32,
      "learning_rate": 1.2561561836926115e-05,
      "loss": 0.2908,
      "step": 131
    },
    {
      "epoch": 1.33,
      "learning_rate": 1.2454854871407993e-05,
      "loss": 0.2854,
      "step": 132
    },
    {
      "epoch": 1.34,
      "learning_rate": 1.234784962086541e-05,
      "loss": 0.2955,
      "step": 133
    },
    {
      "epoch": 1.35,
      "learning_rate": 1.224055908731496e-05,
      "loss": 0.3027,
      "step": 134
    },
    {
      "epoch": 1.36,
      "learning_rate": 1.213299630743747e-05,
      "loss": 0.2459,
      "step": 135
    },
    {
      "epoch": 1.37,
      "learning_rate": 1.2025174350993923e-05,
      "loss": 0.2663,
      "step": 136
    },
    {
      "epoch": 1.38,
      "learning_rate": 1.1917106319237386e-05,
      "loss": 0.22,
      "step": 137
    },
    {
      "epoch": 1.39,
      "learning_rate": 1.1808805343321102e-05,
      "loss": 0.2549,
      "step": 138
    },
    {
      "epoch": 1.4,
      "learning_rate": 1.1700284582702933e-05,
      "loss": 0.2712,
      "step": 139
    },
    {
      "epoch": 1.41,
      "learning_rate": 1.1591557223546394e-05,
      "loss": 0.2347,
      "step": 140
    },
    {
      "epoch": 1.42,
      "learning_rate": 1.148263647711842e-05,
      "loss": 0.2478,
      "step": 141
    },
    {
      "epoch": 1.43,
      "learning_rate": 1.1373535578184083e-05,
      "loss": 0.2049,
      "step": 142
    },
    {
      "epoch": 1.44,
      "learning_rate": 1.1264267783398463e-05,
      "loss": 0.2375,
      "step": 143
    },
    {
      "epoch": 1.45,
      "learning_rate": 1.1154846369695864e-05,
      "loss": 0.2893,
      "step": 144
    },
    {
      "epoch": 1.46,
      "learning_rate": 1.1045284632676535e-05,
      "loss": 0.2361,
      "step": 145
    },
    {
      "epoch": 1.47,
      "learning_rate": 1.093559588499118e-05,
      "loss": 0.1738,
      "step": 146
    },
    {
      "epoch": 1.48,
      "learning_rate": 1.0825793454723325e-05,
      "loss": 0.2037,
      "step": 147
    },
    {
      "epoch": 1.49,
      "learning_rate": 1.0715890683769872e-05,
      "loss": 0.2048,
      "step": 148
    },
    {
      "epoch": 1.51,
      "learning_rate": 1.060590092621994e-05,
      "loss": 0.1777,
      "step": 149
    },
    {
      "epoch": 1.52,
      "learning_rate": 1.0495837546732224e-05,
      "loss": 0.2889,
      "step": 150
    },
    {
      "epoch": 1.53,
      "learning_rate": 1.0385713918911104e-05,
      "loss": 0.2363,
      "step": 151
    },
    {
      "epoch": 1.54,
      "learning_rate": 1.0275543423681622e-05,
      "loss": 0.2456,
      "step": 152
    },
    {
      "epoch": 1.55,
      "learning_rate": 1.0165339447663586e-05,
      "loss": 0.236,
      "step": 153
    },
    {
      "epoch": 1.56,
      "learning_rate": 1.0055115381545006e-05,
      "loss": 0.2409,
      "step": 154
    },
    {
      "epoch": 1.57,
      "learning_rate": 9.944884618454996e-06,
      "loss": 0.2758,
      "step": 155
    },
    {
      "epoch": 1.58,
      "learning_rate": 9.834660552336415e-06,
      "loss": 0.1752,
      "step": 156
    },
    {
      "epoch": 1.59,
      "learning_rate": 9.724456576318383e-06,
      "loss": 0.2372,
      "step": 157
    },
    {
      "epoch": 1.6,
      "learning_rate": 9.614286081088895e-06,
      "loss": 0.2148,
      "step": 158
    },
    {
      "epoch": 1.61,
      "learning_rate": 9.504162453267776e-06,
      "loss": 0.1684,
      "step": 159
    },
    {
      "epoch": 1.62,
      "learning_rate": 9.394099073780066e-06,
      "loss": 0.2243,
      "step": 160
    },
    {
      "epoch": 1.63,
      "learning_rate": 9.284109316230133e-06,
      "loss": 0.2365,
      "step": 161
    },
    {
      "epoch": 1.64,
      "learning_rate": 9.174206545276678e-06,
      "loss": 0.2333,
      "step": 162
    },
    {
      "epoch": 1.65,
      "learning_rate": 9.064404115008824e-06,
      "loss": 0.2108,
      "step": 163
    },
    {
      "epoch": 1.66,
      "learning_rate": 8.954715367323468e-06,
      "loss": 0.283,
      "step": 164
    },
    {
      "epoch": 1.67,
      "learning_rate": 8.84515363030414e-06,
      "loss": 0.2383,
      "step": 165
    },
    {
      "epoch": 1.68,
      "learning_rate": 8.735732216601538e-06,
      "loss": 0.1921,
      "step": 166
    },
    {
      "epoch": 1.69,
      "learning_rate": 8.626464421815919e-06,
      "loss": 0.2585,
      "step": 167
    },
    {
      "epoch": 1.7,
      "learning_rate": 8.51736352288158e-06,
      "loss": 0.2201,
      "step": 168
    },
    {
      "epoch": 1.71,
      "learning_rate": 8.408442776453606e-06,
      "loss": 0.2088,
      "step": 169
    },
    {
      "epoch": 1.72,
      "learning_rate": 8.299715417297072e-06,
      "loss": 0.1736,
      "step": 170
    },
    {
      "epoch": 1.73,
      "learning_rate": 8.191194656678905e-06,
      "loss": 0.2028,
      "step": 171
    },
    {
      "epoch": 1.74,
      "learning_rate": 8.082893680762619e-06,
      "loss": 0.2331,
      "step": 172
    },
    {
      "epoch": 1.75,
      "learning_rate": 7.974825649006082e-06,
      "loss": 0.2313,
      "step": 173
    },
    {
      "epoch": 1.76,
      "learning_rate": 7.867003692562533e-06,
      "loss": 0.3048,
      "step": 174
    },
    {
      "epoch": 1.77,
      "learning_rate": 7.759440912685043e-06,
      "loss": 0.2014,
      "step": 175
    },
    {
      "epoch": 1.78,
      "learning_rate": 7.652150379134593e-06,
      "loss": 0.2098,
      "step": 176
    },
    {
      "epoch": 1.79,
      "learning_rate": 7.545145128592009e-06,
      "loss": 0.2076,
      "step": 177
    },
    {
      "epoch": 1.8,
      "learning_rate": 7.438438163073884e-06,
      "loss": 0.1652,
      "step": 178
    },
    {
      "epoch": 1.81,
      "learning_rate": 7.3320424483527385e-06,
      "loss": 0.1559,
      "step": 179
    },
    {
      "epoch": 1.82,
      "learning_rate": 7.225970912381557e-06,
      "loss": 0.2062,
      "step": 180
    },
    {
      "epoch": 1.83,
      "learning_rate": 7.120236443722941e-06,
      "loss": 0.2017,
      "step": 181
    },
    {
      "epoch": 1.84,
      "learning_rate": 7.014851889983058e-06,
      "loss": 0.1794,
      "step": 182
    },
    {
      "epoch": 1.85,
      "learning_rate": 6.909830056250527e-06,
      "loss": 0.1932,
      "step": 183
    },
    {
      "epoch": 1.86,
      "learning_rate": 6.80518370354052e-06,
      "loss": 0.2138,
      "step": 184
    },
    {
      "epoch": 1.87,
      "learning_rate": 6.700925547244173e-06,
      "loss": 0.1776,
      "step": 185
    },
    {
      "epoch": 1.88,
      "learning_rate": 6.59706825558357e-06,
      "loss": 0.1961,
      "step": 186
    },
    {
      "epoch": 1.89,
      "learning_rate": 6.4936244480724575e-06,
      "loss": 0.2006,
      "step": 187
    },
    {
      "epoch": 1.9,
      "learning_rate": 6.3906066939828546e-06,
      "loss": 0.263,
      "step": 188
    },
    {
      "epoch": 1.91,
      "learning_rate": 6.2880275108177915e-06,
      "loss": 0.201,
      "step": 189
    },
    {
      "epoch": 1.92,
      "learning_rate": 6.18589936279034e-06,
      "loss": 0.1588,
      "step": 190
    },
    {
      "epoch": 1.93,
      "learning_rate": 6.084234659309088e-06,
      "loss": 0.216,
      "step": 191
    },
    {
      "epoch": 1.94,
      "learning_rate": 5.983045753470308e-06,
      "loss": 0.1918,
      "step": 192
    },
    {
      "epoch": 1.95,
      "learning_rate": 5.8823449405569525e-06,
      "loss": 0.184,
      "step": 193
    },
    {
      "epoch": 1.96,
      "learning_rate": 5.782144456544681e-06,
      "loss": 0.1564,
      "step": 194
    },
    {
      "epoch": 1.97,
      "learning_rate": 5.6824564766150724e-06,
      "loss": 0.1936,
      "step": 195
    },
    {
      "epoch": 1.98,
      "learning_rate": 5.58329311367626e-06,
      "loss": 0.1495,
      "step": 196
    },
    {
      "epoch": 1.99,
      "learning_rate": 5.484666416891109e-06,
      "loss": 0.2793,
      "step": 197
    },
    {
      "epoch": 2.0,
      "learning_rate": 5.386588370213124e-06,
      "loss": 0.1719,
      "step": 198
    },
    {
      "epoch": 2.01,
      "learning_rate": 5.289070890930328e-06,
      "loss": 0.194,
      "step": 199
    },
    {
      "epoch": 2.02,
      "learning_rate": 5.192125828217203e-06,
      "loss": 0.1358,
      "step": 200
    },
    {
      "epoch": 2.03,
      "learning_rate": 5.095764961694923e-06,
      "loss": 0.16,
      "step": 201
    },
    {
      "epoch": 2.04,
      "learning_rate": 5.000000000000003e-06,
      "loss": 0.1797,
      "step": 202
    },
    {
      "epoch": 2.05,
      "learning_rate": 4.904842579361653e-06,
      "loss": 0.1968,
      "step": 203
    },
    {
      "epoch": 2.06,
      "learning_rate": 4.8103042621878515e-06,
      "loss": 0.1639,
      "step": 204
    },
    {
      "epoch": 2.07,
      "learning_rate": 4.716396535660412e-06,
      "loss": 0.1747,
      "step": 205
    },
    {
      "epoch": 2.08,
      "learning_rate": 4.623130810339219e-06,
      "loss": 0.2478,
      "step": 206
    },
    {
      "epoch": 2.09,
      "learning_rate": 4.530518418775734e-06,
      "loss": 0.1603,
      "step": 207
    },
    {
      "epoch": 2.1,
      "learning_rate": 4.438570614135994e-06,
      "loss": 0.1551,
      "step": 208
    },
    {
      "epoch": 2.11,
      "learning_rate": 4.347298568833281e-06,
      "loss": 0.1762,
      "step": 209
    },
    {
      "epoch": 2.12,
      "learning_rate": 4.256713373170565e-06,
      "loss": 0.1881,
      "step": 210
    },
    {
      "epoch": 2.13,
      "learning_rate": 4.166826033992939e-06,
      "loss": 0.1405,
      "step": 211
    },
    {
      "epoch": 2.14,
      "learning_rate": 4.077647473350201e-06,
      "loss": 0.1996,
      "step": 212
    },
    {
      "epoch": 2.15,
      "learning_rate": 3.989188527169749e-06,
      "loss": 0.1252,
      "step": 213
    },
    {
      "epoch": 2.16,
      "learning_rate": 3.90145994393991e-06,
      "loss": 0.1156,
      "step": 214
    },
    {
      "epoch": 2.17,
      "learning_rate": 3.8144723834039076e-06,
      "loss": 0.1462,
      "step": 215
    },
    {
      "epoch": 2.18,
      "learning_rate": 3.72823641526463e-06,
      "loss": 0.2174,
      "step": 216
    },
    {
      "epoch": 2.19,
      "learning_rate": 3.6427625179003223e-06,
      "loss": 0.2678,
      "step": 217
    },
    {
      "epoch": 2.2,
      "learning_rate": 3.5580610770913593e-06,
      "loss": 0.1681,
      "step": 218
    },
    {
      "epoch": 2.21,
      "learning_rate": 3.4741423847583134e-06,
      "loss": 0.2125,
      "step": 219
    },
    {
      "epoch": 2.22,
      "learning_rate": 3.3910166377113894e-06,
      "loss": 0.1816,
      "step": 220
    },
    {
      "epoch": 2.23,
      "learning_rate": 3.308693936411421e-06,
      "loss": 0.1421,
      "step": 221
    },
    {
      "epoch": 2.24,
      "learning_rate": 3.2271842837425917e-06,
      "loss": 0.1898,
      "step": 222
    },
    {
      "epoch": 2.25,
      "learning_rate": 3.1464975837970035e-06,
      "loss": 0.1321,
      "step": 223
    },
    {
      "epoch": 2.26,
      "learning_rate": 3.0666436406712485e-06,
      "loss": 0.1529,
      "step": 224
    },
    {
      "epoch": 2.27,
      "learning_rate": 2.9876321572751143e-06,
      "loss": 0.1399,
      "step": 225
    },
    {
      "epoch": 2.28,
      "learning_rate": 2.9094727341526275e-06,
      "loss": 0.1596,
      "step": 226
    },
    {
      "epoch": 2.29,
      "learning_rate": 2.8321748683154893e-06,
      "loss": 0.1354,
      "step": 227
    },
    {
      "epoch": 2.3,
      "learning_rate": 2.7557479520891104e-06,
      "loss": 0.1474,
      "step": 228
    },
    {
      "epoch": 2.31,
      "learning_rate": 2.680201271971383e-06,
      "loss": 0.1558,
      "step": 229
    },
    {
      "epoch": 2.32,
      "learning_rate": 2.6055440075042793e-06,
      "loss": 0.1595,
      "step": 230
    },
    {
      "epoch": 2.33,
      "learning_rate": 2.5317852301584642e-06,
      "loss": 0.1791,
      "step": 231
    },
    {
      "epoch": 2.34,
      "learning_rate": 2.4589339022310386e-06,
      "loss": 0.1387,
      "step": 232
    },
    {
      "epoch": 2.35,
      "learning_rate": 2.386998875756554e-06,
      "loss": 0.1032,
      "step": 233
    },
    {
      "epoch": 2.36,
      "learning_rate": 2.315988891431412e-06,
      "loss": 0.1711,
      "step": 234
    },
    {
      "epoch": 2.37,
      "learning_rate": 2.2459125775517854e-06,
      "loss": 0.1274,
      "step": 235
    },
    {
      "epoch": 2.38,
      "learning_rate": 2.1767784489652345e-06,
      "loss": 0.1584,
      "step": 236
    },
    {
      "epoch": 2.39,
      "learning_rate": 2.1085949060360654e-06,
      "loss": 0.1374,
      "step": 237
    },
    {
      "epoch": 2.4,
      "learning_rate": 2.0413702336246156e-06,
      "loss": 0.1403,
      "step": 238
    },
    {
      "epoch": 2.41,
      "learning_rate": 1.97511260008059e-06,
      "loss": 0.1533,
      "step": 239
    },
    {
      "epoch": 2.42,
      "learning_rate": 1.9098300562505266e-06,
      "loss": 0.1528,
      "step": 240
    },
    {
      "epoch": 2.43,
      "learning_rate": 1.8455305344995523e-06,
      "loss": 0.1275,
      "step": 241
    },
    {
      "epoch": 2.44,
      "learning_rate": 1.7822218477475496e-06,
      "loss": 0.209,
      "step": 242
    },
    {
      "epoch": 2.45,
      "learning_rate": 1.7199116885197996e-06,
      "loss": 0.1208,
      "step": 243
    },
    {
      "epoch": 2.46,
      "learning_rate": 1.6586076280123032e-06,
      "loss": 0.0937,
      "step": 244
    },
    {
      "epoch": 2.47,
      "learning_rate": 1.5983171151717924e-06,
      "loss": 0.1244,
      "step": 245
    },
    {
      "epoch": 2.48,
      "learning_rate": 1.5390474757906449e-06,
      "loss": 0.1131,
      "step": 246
    },
    {
      "epoch": 2.49,
      "learning_rate": 1.4808059116167306e-06,
      "loss": 0.1315,
      "step": 247
    },
    {
      "epoch": 2.51,
      "learning_rate": 1.4235994994783297e-06,
      "loss": 0.1891,
      "step": 248
    },
    {
      "epoch": 2.52,
      "learning_rate": 1.367435190424261e-06,
      "loss": 0.1316,
      "step": 249
    },
    {
      "epoch": 2.53,
      "learning_rate": 1.3123198088792577e-06,
      "loss": 0.155,
      "step": 250
    },
    {
      "epoch": 2.54,
      "learning_rate": 1.2582600518147448e-06,
      "loss": 0.1472,
      "step": 251
    },
    {
      "epoch": 2.55,
      "learning_rate": 1.2052624879351105e-06,
      "loss": 0.1732,
      "step": 252
    },
    {
      "epoch": 2.56,
      "learning_rate": 1.1533335568795412e-06,
      "loss": 0.1536,
      "step": 253
    },
    {
      "epoch": 2.57,
      "learning_rate": 1.1024795684395695e-06,
      "loss": 0.1176,
      "step": 254
    },
    {
      "epoch": 2.58,
      "learning_rate": 1.0527067017923654e-06,
      "loss": 0.1502,
      "step": 255
    },
    {
      "epoch": 2.59,
      "learning_rate": 1.0040210047499289e-06,
      "loss": 0.1859,
      "step": 256
    },
    {
      "epoch": 2.6,
      "learning_rate": 9.564283930242258e-07,
      "loss": 0.1167,
      "step": 257
    },
    {
      "epoch": 2.61,
      "learning_rate": 9.09934649508375e-07,
      "loss": 0.1391,
      "step": 258
    },
    {
      "epoch": 2.62,
      "learning_rate": 8.645454235739903e-07,
      "loss": 0.1438,
      "step": 259
    },
    {
      "epoch": 2.63,
      "learning_rate": 8.202662303847298e-07,
      "loss": 0.1527,
      "step": 260
    },
    {
      "epoch": 2.64,
      "learning_rate": 7.771024502261526e-07,
      "loss": 0.1238,
      "step": 261
    },
    {
      "epoch": 2.65,
      "learning_rate": 7.350593278519824e-07,
      "loss": 0.1709,
      "step": 262
    },
    {
      "epoch": 2.66,
      "learning_rate": 6.941419718468168e-07,
      "loss": 0.1836,
      "step": 263
    },
    {
      "epoch": 2.67,
      "learning_rate": 6.543553540053926e-07,
      "loss": 0.1866,
      "step": 264
    },
    {
      "epoch": 2.68,
      "learning_rate": 6.157043087284797e-07,
      "loss": 0.1365,
      "step": 265
    },
    {
      "epoch": 2.69,
      "learning_rate": 5.781935324354571e-07,
      "loss": 0.1835,
      "step": 266
    },
    {
      "epoch": 2.7,
      "learning_rate": 5.418275829936537e-07,
      "loss": 0.1197,
      "step": 267
    },
    {
      "epoch": 2.71,
      "learning_rate": 5.066108791645407e-07,
      "loss": 0.1345,
      "step": 268
    },
    {
      "epoch": 2.72,
      "learning_rate": 4.7254770006681105e-07,
      "loss": 0.1644,
      "step": 269
    },
    {
      "epoch": 2.73,
      "learning_rate": 4.396421846564236e-07,
      "loss": 0.1511,
      "step": 270
    },
    {
      "epoch": 2.74,
      "learning_rate": 4.078983312237017e-07,
      "loss": 0.179,
      "step": 271
    },
    {
      "epoch": 2.75,
      "learning_rate": 3.773199969074959e-07,
      "loss": 0.1191,
      "step": 272
    },
    {
      "epoch": 2.76,
      "learning_rate": 3.4791089722651437e-07,
      "loss": 0.127,
      "step": 273
    },
    {
      "epoch": 2.77,
      "learning_rate": 3.1967460562785325e-07,
      "loss": 0.1226,
      "step": 274
    },
    {
      "epoch": 2.78,
      "learning_rate": 2.926145530528002e-07,
      "loss": 0.1787,
      "step": 275
    },
    {
      "epoch": 2.79,
      "learning_rate": 2.667340275199426e-07,
      "loss": 0.1438,
      "step": 276
    },
    {
      "epoch": 2.8,
      "learning_rate": 2.420361737256438e-07,
      "loss": 0.1226,
      "step": 277
    },
    {
      "epoch": 2.81,
      "learning_rate": 2.1852399266194312e-07,
      "loss": 0.1422,
      "step": 278
    },
    {
      "epoch": 2.82,
      "learning_rate": 1.9620034125190645e-07,
      "loss": 0.1451,
      "step": 279
    },
    {
      "epoch": 2.83,
      "learning_rate": 1.7506793200248507e-07,
      "loss": 0.1726,
      "step": 280
    },
    {
      "epoch": 2.84,
      "learning_rate": 1.5512933267492813e-07,
      "loss": 0.1418,
      "step": 281
    },
    {
      "epoch": 2.85,
      "learning_rate": 1.3638696597277678e-07,
      "loss": 0.1239,
      "step": 282
    },
    {
      "epoch": 2.86,
      "learning_rate": 1.18843109247484e-07,
      "loss": 0.1554,
      "step": 283
    },
    {
      "epoch": 2.87,
      "learning_rate": 1.0249989422169926e-07,
      "loss": 0.1818,
      "step": 284
    },
    {
      "epoch": 2.88,
      "learning_rate": 8.735930673024806e-08,
      "loss": 0.1633,
      "step": 285
    },
    {
      "epoch": 2.89,
      "learning_rate": 7.342318647883595e-08,
      "loss": 0.1912,
      "step": 286
    },
    {
      "epoch": 2.9,
      "learning_rate": 6.069322682050516e-08,
      "loss": 0.1301,
      "step": 287
    },
    {
      "epoch": 2.91,
      "learning_rate": 4.9170974549885844e-08,
      "loss": 0.1728,
      "step": 288
    },
    {
      "epoch": 2.92,
      "learning_rate": 3.885782971524088e-08,
      "loss": 0.1156,
      "step": 289
    },
    {
      "epoch": 2.93,
      "learning_rate": 2.9755045448351948e-08,
      "loss": 0.1304,
      "step": 290
    },
    {
      "epoch": 2.94,
      "learning_rate": 2.1863727812254653e-08,
      "loss": 0.1408,
      "step": 291
    },
    {
      "epoch": 2.95,
      "learning_rate": 1.518483566683826e-08,
      "loss": 0.2036,
      "step": 292
    },
    {
      "epoch": 2.96,
      "learning_rate": 9.719180552341113e-09,
      "loss": 0.1597,
      "step": 293
    },
    {
      "epoch": 2.97,
      "learning_rate": 5.467426590739511e-09,
      "loss": 0.1618,
      "step": 294
    },
    {
      "epoch": 2.98,
      "learning_rate": 2.430090405054486e-09,
      "loss": 0.1651,
      "step": 295
    },
    {
      "epoch": 2.99,
      "learning_rate": 6.075410565697937e-10,
      "loss": 0.1322,
      "step": 296
    },
    {
      "epoch": 3.0,
      "learning_rate": 0.0,
      "loss": 0.1284,
      "step": 297
    },
    {
      "epoch": 3.0,
      "step": 297,
      "total_flos": 7.834692227086418e+17,
      "train_loss": 0.27782333958329575,
      "train_runtime": 6854.2409,
      "train_samples_per_second": 5.521,
      "train_steps_per_second": 0.043
    }
  ],
  "max_steps": 297,
  "num_train_epochs": 3,
  "total_flos": 7.834692227086418e+17,
  "trial_name": null,
  "trial_params": null
}