{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.8313847752663029,
  "eval_steps": 500,
  "global_step": 4000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0,
      "grad_norm": 25.939294815063477,
      "learning_rate": 5e-06,
      "loss": 12.7845,
      "step": 5
    },
    {
      "epoch": 0.0,
      "grad_norm": 31.135154724121094,
      "learning_rate": 1e-05,
      "loss": 12.6043,
      "step": 10
    },
    {
      "epoch": 0.0,
      "grad_norm": 18.018098831176758,
      "learning_rate": 1.5e-05,
      "loss": 11.974,
      "step": 15
    },
    {
      "epoch": 0.0,
      "grad_norm": 15.843639373779297,
      "learning_rate": 2e-05,
      "loss": 10.9757,
      "step": 20
    },
    {
      "epoch": 0.01,
      "grad_norm": 14.263230323791504,
      "learning_rate": 2.5e-05,
      "loss": 9.9366,
      "step": 25
    },
    {
      "epoch": 0.01,
      "grad_norm": 8.178106307983398,
      "learning_rate": 3e-05,
      "loss": 9.1233,
      "step": 30
    },
    {
      "epoch": 0.01,
      "grad_norm": 4.725214004516602,
      "learning_rate": 3.5e-05,
      "loss": 8.5597,
      "step": 35
    },
    {
      "epoch": 0.01,
      "grad_norm": 3.4280221462249756,
      "learning_rate": 4e-05,
      "loss": 8.1551,
      "step": 40
    },
    {
      "epoch": 0.01,
      "grad_norm": 2.942718029022217,
      "learning_rate": 4.5e-05,
      "loss": 8.0071,
      "step": 45
    },
    {
      "epoch": 0.01,
      "grad_norm": 2.1034750938415527,
      "learning_rate": 5e-05,
      "loss": 7.9824,
      "step": 50
    },
    {
      "epoch": 0.01,
      "grad_norm": 1.4053250551223755,
      "learning_rate": 5.500000000000001e-05,
      "loss": 7.9139,
      "step": 55
    },
    {
      "epoch": 0.01,
      "grad_norm": 1.3692823648452759,
      "learning_rate": 6e-05,
      "loss": 7.8578,
      "step": 60
    },
    {
      "epoch": 0.01,
      "grad_norm": 1.4134682416915894,
      "learning_rate": 6.500000000000001e-05,
      "loss": 7.8204,
      "step": 65
    },
    {
      "epoch": 0.01,
      "grad_norm": 2.2567708492279053,
      "learning_rate": 7e-05,
      "loss": 7.6975,
      "step": 70
    },
    {
      "epoch": 0.02,
      "grad_norm": 3.6172780990600586,
      "learning_rate": 7.500000000000001e-05,
      "loss": 7.6252,
      "step": 75
    },
    {
      "epoch": 0.02,
      "grad_norm": 2.1234540939331055,
      "learning_rate": 8e-05,
      "loss": 7.4815,
      "step": 80
    },
    {
      "epoch": 0.02,
      "grad_norm": 3.0313005447387695,
      "learning_rate": 8.5e-05,
      "loss": 7.3062,
      "step": 85
    },
    {
      "epoch": 0.02,
      "grad_norm": 2.3016369342803955,
      "learning_rate": 9e-05,
      "loss": 7.1426,
      "step": 90
    },
    {
      "epoch": 0.02,
      "grad_norm": 2.307267189025879,
      "learning_rate": 9.5e-05,
      "loss": 6.951,
      "step": 95
    },
    {
      "epoch": 0.02,
      "grad_norm": 3.143004894256592,
      "learning_rate": 0.0001,
      "loss": 6.761,
      "step": 100
    },
    {
      "epoch": 0.02,
      "grad_norm": 2.0914149284362793,
      "learning_rate": 9.999972205865686e-05,
      "loss": 6.6689,
      "step": 105
    },
    {
      "epoch": 0.02,
      "grad_norm": 2.0763864517211914,
      "learning_rate": 9.999888823771751e-05,
      "loss": 6.5193,
      "step": 110
    },
    {
      "epoch": 0.02,
      "grad_norm": 2.037773609161377,
      "learning_rate": 9.999749854645204e-05,
      "loss": 6.4309,
      "step": 115
    },
    {
      "epoch": 0.02,
      "grad_norm": 3.3751368522644043,
      "learning_rate": 9.99955530003106e-05,
      "loss": 6.3204,
      "step": 120
    },
    {
      "epoch": 0.03,
      "grad_norm": 3.2355241775512695,
      "learning_rate": 9.99930516209231e-05,
      "loss": 6.2199,
      "step": 125
    },
    {
      "epoch": 0.03,
      "grad_norm": 1.9962437152862549,
      "learning_rate": 9.998999443609897e-05,
      "loss": 6.0844,
      "step": 130
    },
    {
      "epoch": 0.03,
      "grad_norm": 1.7183597087860107,
      "learning_rate": 9.998638147982696e-05,
      "loss": 6.0284,
      "step": 135
    },
    {
      "epoch": 0.03,
      "grad_norm": 1.768682837486267,
      "learning_rate": 9.998221279227467e-05,
      "loss": 5.9419,
      "step": 140
    },
    {
      "epoch": 0.03,
      "grad_norm": 2.113523483276367,
      "learning_rate": 9.997748841978812e-05,
      "loss": 5.9186,
      "step": 145
    },
    {
      "epoch": 0.03,
      "grad_norm": 2.2045562267303467,
      "learning_rate": 9.997220841489122e-05,
      "loss": 5.8187,
      "step": 150
    },
    {
      "epoch": 0.03,
      "grad_norm": 1.5878034830093384,
      "learning_rate": 9.996637283628528e-05,
      "loss": 5.7585,
      "step": 155
    },
    {
      "epoch": 0.03,
      "grad_norm": 3.1221227645874023,
      "learning_rate": 9.995998174884821e-05,
      "loss": 5.7104,
      "step": 160
    },
    {
      "epoch": 0.03,
      "grad_norm": 5.41145133972168,
      "learning_rate": 9.995303522363394e-05,
      "loss": 5.5909,
      "step": 165
    },
    {
      "epoch": 0.04,
      "grad_norm": 3.228010654449463,
      "learning_rate": 9.99455333378715e-05,
      "loss": 5.6572,
      "step": 170
    },
    {
      "epoch": 0.04,
      "grad_norm": 3.5033600330352783,
      "learning_rate": 9.993747617496428e-05,
      "loss": 5.5372,
      "step": 175
    },
    {
      "epoch": 0.04,
      "grad_norm": 2.9988412857055664,
      "learning_rate": 9.9928863824489e-05,
      "loss": 5.4833,
      "step": 180
    },
    {
      "epoch": 0.04,
      "grad_norm": 2.1020758152008057,
      "learning_rate": 9.99196963821948e-05,
      "loss": 5.4567,
      "step": 185
    },
    {
      "epoch": 0.04,
      "grad_norm": 2.225062847137451,
      "learning_rate": 9.990997395000217e-05,
      "loss": 5.4851,
      "step": 190
    },
    {
      "epoch": 0.04,
      "grad_norm": 2.595822334289551,
      "learning_rate": 9.989969663600169e-05,
      "loss": 5.3496,
      "step": 195
    },
    {
      "epoch": 0.04,
      "grad_norm": 2.4036571979522705,
      "learning_rate": 9.9888864554453e-05,
      "loss": 5.3953,
      "step": 200
    },
    {
      "epoch": 0.04,
      "grad_norm": 1.6016147136688232,
      "learning_rate": 9.987747782578342e-05,
      "loss": 5.3285,
      "step": 205
    },
    {
      "epoch": 0.04,
      "grad_norm": 2.005527973175049,
      "learning_rate": 9.986553657658668e-05,
      "loss": 5.239,
      "step": 210
    },
    {
      "epoch": 0.04,
      "grad_norm": 2.4785213470458984,
      "learning_rate": 9.985304093962145e-05,
      "loss": 5.2228,
      "step": 215
    },
    {
      "epoch": 0.05,
      "grad_norm": 3.3209078311920166,
      "learning_rate": 9.983999105380988e-05,
      "loss": 5.1534,
      "step": 220
    },
    {
      "epoch": 0.05,
      "grad_norm": 2.1678926944732666,
      "learning_rate": 9.982638706423608e-05,
      "loss": 5.1537,
      "step": 225
    },
    {
      "epoch": 0.05,
      "grad_norm": 2.888963460922241,
      "learning_rate": 9.98122291221445e-05,
      "loss": 5.1225,
      "step": 230
    },
    {
      "epoch": 0.05,
      "grad_norm": 2.3525912761688232,
      "learning_rate": 9.979751738493826e-05,
      "loss": 5.0702,
      "step": 235
    },
    {
      "epoch": 0.05,
      "grad_norm": 2.584390640258789,
      "learning_rate": 9.978225201617732e-05,
      "loss": 5.0249,
      "step": 240
    },
    {
      "epoch": 0.05,
      "grad_norm": 2.243316888809204,
      "learning_rate": 9.976643318557678e-05,
      "loss": 5.0536,
      "step": 245
    },
    {
      "epoch": 0.05,
      "grad_norm": 2.186607837677002,
      "learning_rate": 9.975006106900495e-05,
      "loss": 5.0348,
      "step": 250
    },
    {
      "epoch": 0.05,
      "grad_norm": 2.3452324867248535,
      "learning_rate": 9.973313584848132e-05,
      "loss": 4.9099,
      "step": 255
    },
    {
      "epoch": 0.05,
      "grad_norm": 2.6906020641326904,
      "learning_rate": 9.971565771217464e-05,
      "loss": 4.9684,
      "step": 260
    },
    {
      "epoch": 0.06,
      "grad_norm": 2.046823024749756,
      "learning_rate": 9.969762685440076e-05,
      "loss": 4.9113,
      "step": 265
    },
    {
      "epoch": 0.06,
      "grad_norm": 2.4473023414611816,
      "learning_rate": 9.967904347562054e-05,
      "loss": 4.841,
      "step": 270
    },
    {
      "epoch": 0.06,
      "grad_norm": 2.060471296310425,
      "learning_rate": 9.965990778243755e-05,
      "loss": 4.796,
      "step": 275
    },
    {
      "epoch": 0.06,
      "grad_norm": 1.830763578414917,
      "learning_rate": 9.964021998759577e-05,
      "loss": 4.8137,
      "step": 280
    },
    {
      "epoch": 0.06,
      "grad_norm": 2.4487814903259277,
      "learning_rate": 9.961998030997733e-05,
      "loss": 4.8366,
      "step": 285
    },
    {
      "epoch": 0.06,
      "grad_norm": 2.5107741355895996,
      "learning_rate": 9.95991889745999e-05,
      "loss": 4.8353,
      "step": 290
    },
    {
      "epoch": 0.06,
      "grad_norm": 2.242281198501587,
      "learning_rate": 9.957784621261441e-05,
      "loss": 4.759,
      "step": 295
    },
    {
      "epoch": 0.06,
      "grad_norm": 2.758716106414795,
      "learning_rate": 9.955595226130226e-05,
      "loss": 4.7099,
      "step": 300
    },
    {
      "epoch": 0.06,
      "grad_norm": 2.7052996158599854,
      "learning_rate": 9.953350736407282e-05,
      "loss": 4.7054,
      "step": 305
    },
    {
      "epoch": 0.06,
      "grad_norm": 2.8450052738189697,
      "learning_rate": 9.951051177046069e-05,
      "loss": 4.7192,
      "step": 310
    },
    {
      "epoch": 0.07,
      "grad_norm": 2.9926705360412598,
      "learning_rate": 9.948696573612292e-05,
      "loss": 4.7052,
      "step": 315
    },
    {
      "epoch": 0.07,
      "grad_norm": 2.6659626960754395,
      "learning_rate": 9.946286952283618e-05,
      "loss": 4.6958,
      "step": 320
    },
    {
      "epoch": 0.07,
      "grad_norm": 2.2126383781433105,
      "learning_rate": 9.943822339849381e-05,
      "loss": 4.6222,
      "step": 325
    },
    {
      "epoch": 0.07,
      "grad_norm": 2.288212537765503,
      "learning_rate": 9.941302763710288e-05,
      "loss": 4.6123,
      "step": 330
    },
    {
      "epoch": 0.07,
      "grad_norm": 1.80583918094635,
      "learning_rate": 9.938728251878116e-05,
      "loss": 4.5957,
      "step": 335
    },
    {
      "epoch": 0.07,
      "grad_norm": 2.7819113731384277,
      "learning_rate": 9.936098832975393e-05,
      "loss": 4.6556,
      "step": 340
    },
    {
      "epoch": 0.07,
      "grad_norm": 2.275831937789917,
      "learning_rate": 9.933414536235091e-05,
      "loss": 4.5909,
      "step": 345
    },
    {
      "epoch": 0.07,
      "grad_norm": 2.36179256439209,
      "learning_rate": 9.93067539150029e-05,
      "loss": 4.5135,
      "step": 350
    },
    {
      "epoch": 0.07,
      "grad_norm": 2.9257872104644775,
      "learning_rate": 9.927881429223853e-05,
      "loss": 4.5054,
      "step": 355
    },
    {
      "epoch": 0.07,
      "grad_norm": 1.807866096496582,
      "learning_rate": 9.925032680468085e-05,
      "loss": 4.5484,
      "step": 360
    },
    {
      "epoch": 0.08,
      "grad_norm": 2.3611161708831787,
      "learning_rate": 9.922129176904388e-05,
      "loss": 4.5875,
      "step": 365
    },
    {
      "epoch": 0.08,
      "grad_norm": 2.4570348262786865,
      "learning_rate": 9.919170950812911e-05,
      "loss": 4.5202,
      "step": 370
    },
    {
      "epoch": 0.08,
      "grad_norm": 2.2100729942321777,
      "learning_rate": 9.916158035082184e-05,
      "loss": 4.4419,
      "step": 375
    },
    {
      "epoch": 0.08,
      "grad_norm": 2.531543731689453,
      "learning_rate": 9.913090463208763e-05,
      "loss": 4.4315,
      "step": 380
    },
    {
      "epoch": 0.08,
      "grad_norm": 2.6799845695495605,
      "learning_rate": 9.90996826929685e-05,
      "loss": 4.4464,
      "step": 385
    },
    {
      "epoch": 0.08,
      "grad_norm": 2.1055126190185547,
      "learning_rate": 9.906791488057916e-05,
      "loss": 4.4059,
      "step": 390
    },
    {
      "epoch": 0.08,
      "grad_norm": 3.3053061962127686,
      "learning_rate": 9.903560154810313e-05,
      "loss": 4.3278,
      "step": 395
    },
    {
      "epoch": 0.08,
      "grad_norm": 2.734743595123291,
      "learning_rate": 9.900274305478887e-05,
      "loss": 4.4565,
      "step": 400
    },
    {
      "epoch": 0.08,
      "grad_norm": 2.5019032955169678,
      "learning_rate": 9.896933976594572e-05,
      "loss": 4.4156,
      "step": 405
    },
    {
      "epoch": 0.09,
      "grad_norm": 2.190185308456421,
      "learning_rate": 9.893539205293989e-05,
      "loss": 4.3911,
      "step": 410
    },
    {
      "epoch": 0.09,
      "grad_norm": 2.6199285984039307,
      "learning_rate": 9.890090029319028e-05,
      "loss": 4.4045,
      "step": 415
    },
    {
      "epoch": 0.09,
      "grad_norm": 2.0717196464538574,
      "learning_rate": 9.886586487016433e-05,
      "loss": 4.3985,
      "step": 420
    },
    {
      "epoch": 0.09,
      "grad_norm": 2.090743064880371,
      "learning_rate": 9.883028617337378e-05,
      "loss": 4.4446,
      "step": 425
    },
    {
      "epoch": 0.09,
      "grad_norm": 1.840830683708191,
      "learning_rate": 9.879416459837022e-05,
      "loss": 4.3904,
      "step": 430
    },
    {
      "epoch": 0.09,
      "grad_norm": 1.5380135774612427,
      "learning_rate": 9.875750054674082e-05,
      "loss": 4.3363,
      "step": 435
    },
    {
      "epoch": 0.09,
      "grad_norm": 2.3348779678344727,
      "learning_rate": 9.872029442610382e-05,
      "loss": 4.3284,
      "step": 440
    },
    {
      "epoch": 0.09,
      "grad_norm": 2.1247851848602295,
      "learning_rate": 9.8682546650104e-05,
      "loss": 4.3372,
      "step": 445
    },
    {
      "epoch": 0.09,
      "grad_norm": 2.7791380882263184,
      "learning_rate": 9.864425763840802e-05,
      "loss": 4.3115,
      "step": 450
    },
    {
      "epoch": 0.09,
      "grad_norm": 2.4238507747650146,
      "learning_rate": 9.860542781669988e-05,
      "loss": 4.3031,
      "step": 455
    },
    {
      "epoch": 0.1,
      "grad_norm": 1.7853262424468994,
      "learning_rate": 9.85660576166761e-05,
      "loss": 4.2563,
      "step": 460
    },
    {
      "epoch": 0.1,
      "grad_norm": 1.9320998191833496,
      "learning_rate": 9.852614747604093e-05,
      "loss": 4.3229,
      "step": 465
    },
    {
      "epoch": 0.1,
      "grad_norm": 2.030333995819092,
      "learning_rate": 9.848569783850145e-05,
      "loss": 4.2428,
      "step": 470
    },
    {
      "epoch": 0.1,
      "grad_norm": 2.0407378673553467,
      "learning_rate": 9.844470915376278e-05,
      "loss": 4.2509,
      "step": 475
    },
    {
      "epoch": 0.1,
      "grad_norm": 2.0037548542022705,
      "learning_rate": 9.840318187752292e-05,
      "loss": 4.2047,
      "step": 480
    },
    {
      "epoch": 0.1,
      "grad_norm": 1.753675103187561,
      "learning_rate": 9.836111647146771e-05,
      "loss": 4.2229,
      "step": 485
    },
    {
      "epoch": 0.1,
      "grad_norm": 2.087698459625244,
      "learning_rate": 9.831851340326577e-05,
      "loss": 4.2335,
      "step": 490
    },
    {
      "epoch": 0.1,
      "grad_norm": 1.436624526977539,
      "learning_rate": 9.82753731465633e-05,
      "loss": 4.2203,
      "step": 495
    },
    {
      "epoch": 0.1,
      "grad_norm": 2.1177568435668945,
      "learning_rate": 9.823169618097871e-05,
      "loss": 4.2539,
      "step": 500
    },
    {
      "epoch": 0.1,
      "grad_norm": 2.189920425415039,
      "learning_rate": 9.81874829920974e-05,
      "loss": 4.2498,
      "step": 505
    },
    {
      "epoch": 0.11,
      "grad_norm": 2.241565465927124,
      "learning_rate": 9.814273407146623e-05,
      "loss": 4.1886,
      "step": 510
    },
    {
      "epoch": 0.11,
      "grad_norm": 2.1604373455047607,
      "learning_rate": 9.809744991658829e-05,
      "loss": 4.2142,
      "step": 515
    },
    {
      "epoch": 0.11,
      "grad_norm": 1.8642898797988892,
      "learning_rate": 9.805163103091708e-05,
      "loss": 4.1374,
      "step": 520
    },
    {
      "epoch": 0.11,
      "grad_norm": 1.8172324895858765,
      "learning_rate": 9.800527792385112e-05,
      "loss": 4.0864,
      "step": 525
    },
    {
      "epoch": 0.11,
      "grad_norm": 1.7565423250198364,
      "learning_rate": 9.79583911107282e-05,
      "loss": 4.2458,
      "step": 530
    },
    {
      "epoch": 0.11,
      "grad_norm": 2.3410630226135254,
      "learning_rate": 9.791097111281968e-05,
      "loss": 4.2159,
      "step": 535
    },
    {
      "epoch": 0.11,
      "grad_norm": 1.873470664024353,
      "learning_rate": 9.786301845732467e-05,
      "loss": 4.1783,
      "step": 540
    },
    {
      "epoch": 0.11,
      "grad_norm": 2.633479595184326,
      "learning_rate": 9.781453367736418e-05,
      "loss": 4.1571,
      "step": 545
    },
    {
      "epoch": 0.11,
      "grad_norm": 2.039921522140503,
      "learning_rate": 9.776551731197524e-05,
      "loss": 4.0202,
      "step": 550
    },
    {
      "epoch": 0.12,
      "grad_norm": 1.8741806745529175,
      "learning_rate": 9.771596990610478e-05,
      "loss": 4.1756,
      "step": 555
    },
    {
      "epoch": 0.12,
      "grad_norm": 2.023174524307251,
      "learning_rate": 9.766589201060372e-05,
      "loss": 4.1419,
      "step": 560
    },
    {
      "epoch": 0.12,
      "grad_norm": 2.0675840377807617,
      "learning_rate": 9.761528418222077e-05,
      "loss": 4.1443,
      "step": 565
    },
    {
      "epoch": 0.12,
      "grad_norm": 1.8636735677719116,
      "learning_rate": 9.756414698359624e-05,
      "loss": 4.1936,
      "step": 570
    },
    {
      "epoch": 0.12,
      "grad_norm": 1.9990111589431763,
      "learning_rate": 9.75124809832558e-05,
      "loss": 4.1422,
      "step": 575
    },
    {
      "epoch": 0.12,
      "grad_norm": 2.035581588745117,
      "learning_rate": 9.746028675560413e-05,
      "loss": 4.1034,
      "step": 580
    },
    {
      "epoch": 0.12,
      "grad_norm": 1.8671371936798096,
      "learning_rate": 9.740756488091861e-05,
      "loss": 4.0496,
      "step": 585
    },
    {
      "epoch": 0.12,
      "grad_norm": 1.781253695487976,
      "learning_rate": 9.735431594534277e-05,
      "loss": 4.0934,
      "step": 590
    },
    {
      "epoch": 0.12,
      "grad_norm": 1.7661689519882202,
      "learning_rate": 9.730054054087983e-05,
      "loss": 4.0591,
      "step": 595
    },
    {
      "epoch": 0.12,
      "grad_norm": 1.5251692533493042,
      "learning_rate": 9.724623926538612e-05,
      "loss": 4.1665,
      "step": 600
    },
    {
      "epoch": 0.13,
      "grad_norm": 1.780681848526001,
      "learning_rate": 9.719141272256443e-05,
      "loss": 4.0809,
      "step": 605
    },
    {
      "epoch": 0.13,
      "grad_norm": 1.781661033630371,
      "learning_rate": 9.713606152195726e-05,
      "loss": 4.1569,
      "step": 610
    },
    {
      "epoch": 0.13,
      "grad_norm": 2.382237195968628,
      "learning_rate": 9.708018627894011e-05,
      "loss": 4.051,
      "step": 615
    },
    {
      "epoch": 0.13,
      "grad_norm": 1.8986462354660034,
      "learning_rate": 9.702378761471456e-05,
      "loss": 3.9784,
      "step": 620
    },
    {
      "epoch": 0.13,
      "grad_norm": 1.7505619525909424,
      "learning_rate": 9.696686615630146e-05,
      "loss": 4.0702,
      "step": 625
    },
    {
      "epoch": 0.13,
      "grad_norm": 1.7269989252090454,
      "learning_rate": 9.690942253653385e-05,
      "loss": 4.1118,
      "step": 630
    },
    {
      "epoch": 0.13,
      "grad_norm": 1.6084879636764526,
      "learning_rate": 9.685145739405002e-05,
      "loss": 3.9587,
      "step": 635
    },
    {
      "epoch": 0.13,
      "grad_norm": 1.6005403995513916,
      "learning_rate": 9.679297137328634e-05,
      "loss": 3.9399,
      "step": 640
    },
    {
      "epoch": 0.13,
      "grad_norm": 1.661289095878601,
      "learning_rate": 9.673396512447013e-05,
      "loss": 4.0649,
      "step": 645
    },
    {
      "epoch": 0.14,
      "grad_norm": 1.734930157661438,
      "learning_rate": 9.667443930361247e-05,
      "loss": 4.055,
      "step": 650
    },
    {
      "epoch": 0.14,
      "grad_norm": 1.766442894935608,
      "learning_rate": 9.661439457250076e-05,
      "loss": 4.027,
      "step": 655
    },
    {
      "epoch": 0.14,
      "grad_norm": 1.7537400722503662,
      "learning_rate": 9.655383159869158e-05,
      "loss": 3.9548,
      "step": 660
    },
    {
      "epoch": 0.14,
      "grad_norm": 1.6819093227386475,
      "learning_rate": 9.649275105550309e-05,
      "loss": 3.9764,
      "step": 665
    },
    {
      "epoch": 0.14,
      "grad_norm": 1.7222894430160522,
      "learning_rate": 9.643115362200762e-05,
      "loss": 3.978,
      "step": 670
    },
    {
      "epoch": 0.14,
      "grad_norm": 1.6062192916870117,
      "learning_rate": 9.636903998302409e-05,
      "loss": 4.0146,
      "step": 675
    },
    {
      "epoch": 0.14,
      "grad_norm": 1.6688106060028076,
      "learning_rate": 9.630641082911045e-05,
      "loss": 3.9447,
      "step": 680
    },
    {
      "epoch": 0.14,
      "grad_norm": 1.630411148071289,
      "learning_rate": 9.624326685655593e-05,
      "loss": 3.9864,
      "step": 685
    },
    {
      "epoch": 0.14,
      "grad_norm": 1.6122987270355225,
      "learning_rate": 9.617960876737337e-05,
      "loss": 3.811,
      "step": 690
    },
    {
      "epoch": 0.14,
      "grad_norm": 1.5039477348327637,
      "learning_rate": 9.611543726929134e-05,
      "loss": 4.009,
      "step": 695
    },
    {
      "epoch": 0.15,
      "grad_norm": 1.8282253742218018,
      "learning_rate": 9.605075307574635e-05,
      "loss": 3.908,
      "step": 700
    },
    {
      "epoch": 0.15,
      "grad_norm": 1.4583191871643066,
      "learning_rate": 9.598555690587487e-05,
      "loss": 3.9433,
      "step": 705
    },
    {
      "epoch": 0.15,
      "grad_norm": 1.647357702255249,
      "learning_rate": 9.591984948450532e-05,
      "loss": 3.9461,
      "step": 710
    },
    {
      "epoch": 0.15,
      "grad_norm": 1.5647929906845093,
      "learning_rate": 9.585363154215008e-05,
      "loss": 3.9476,
      "step": 715
    },
    {
      "epoch": 0.15,
      "grad_norm": 1.8287334442138672,
      "learning_rate": 9.578690381499728e-05,
      "loss": 3.8961,
      "step": 720
    },
    {
      "epoch": 0.15,
      "grad_norm": 1.4925986528396606,
      "learning_rate": 9.571966704490271e-05,
      "loss": 3.9188,
      "step": 725
    },
    {
      "epoch": 0.15,
      "grad_norm": 1.3406325578689575,
      "learning_rate": 9.565192197938148e-05,
      "loss": 3.8122,
      "step": 730
    },
    {
      "epoch": 0.15,
      "grad_norm": 1.547147274017334,
      "learning_rate": 9.558366937159977e-05,
      "loss": 3.9272,
      "step": 735
    },
    {
      "epoch": 0.15,
      "grad_norm": 1.4740064144134521,
      "learning_rate": 9.551490998036646e-05,
      "loss": 3.8529,
      "step": 740
    },
    {
      "epoch": 0.15,
      "grad_norm": 1.2662999629974365,
      "learning_rate": 9.544564457012463e-05,
      "loss": 3.9713,
      "step": 745
    },
    {
      "epoch": 0.16,
      "grad_norm": 1.9086430072784424,
      "learning_rate": 9.537587391094314e-05,
      "loss": 3.9148,
      "step": 750
    },
    {
      "epoch": 0.16,
      "grad_norm": 1.478953242301941,
      "learning_rate": 9.5305598778508e-05,
      "loss": 3.9709,
      "step": 755
    },
    {
      "epoch": 0.16,
      "grad_norm": 1.454502820968628,
      "learning_rate": 9.52348199541138e-05,
      "loss": 3.9076,
      "step": 760
    },
    {
      "epoch": 0.16,
      "grad_norm": 1.1357702016830444,
      "learning_rate": 9.516353822465504e-05,
      "loss": 3.8234,
      "step": 765
    },
    {
      "epoch": 0.16,
      "grad_norm": 1.2921209335327148,
      "learning_rate": 9.509175438261726e-05,
      "loss": 3.8645,
      "step": 770
    },
    {
      "epoch": 0.16,
      "grad_norm": 1.3879990577697754,
      "learning_rate": 9.501946922606838e-05,
      "loss": 3.8624,
      "step": 775
    },
    {
      "epoch": 0.16,
      "grad_norm": 1.6303082704544067,
      "learning_rate": 9.494668355864973e-05,
      "loss": 3.9331,
      "step": 780
    },
    {
      "epoch": 0.16,
      "grad_norm": 1.6471551656723022,
      "learning_rate": 9.487339818956716e-05,
      "loss": 3.8262,
      "step": 785
    },
    {
      "epoch": 0.16,
      "grad_norm": 1.5036301612854004,
      "learning_rate": 9.479961393358203e-05,
      "loss": 3.9007,
      "step": 790
    },
    {
      "epoch": 0.17,
      "grad_norm": 1.3957103490829468,
      "learning_rate": 9.472533161100215e-05,
      "loss": 3.8643,
      "step": 795
    },
    {
      "epoch": 0.17,
      "grad_norm": 1.6850672960281372,
      "learning_rate": 9.465055204767265e-05,
      "loss": 3.8083,
      "step": 800
    },
    {
      "epoch": 0.17,
      "grad_norm": 1.609654188156128,
      "learning_rate": 9.457527607496685e-05,
      "loss": 3.8574,
      "step": 805
    },
    {
      "epoch": 0.17,
      "grad_norm": 1.4732401371002197,
      "learning_rate": 9.44995045297769e-05,
      "loss": 3.8665,
      "step": 810
    },
    {
      "epoch": 0.17,
      "grad_norm": 1.182054042816162,
      "learning_rate": 9.442323825450464e-05,
      "loss": 3.8626,
      "step": 815
    },
    {
      "epoch": 0.17,
      "grad_norm": 1.5602997541427612,
      "learning_rate": 9.43464780970521e-05,
      "loss": 3.7527,
      "step": 820
    },
    {
      "epoch": 0.17,
      "grad_norm": 1.391682505607605,
      "learning_rate": 9.426922491081212e-05,
      "loss": 3.7728,
      "step": 825
    },
    {
      "epoch": 0.17,
      "grad_norm": 1.333884596824646,
      "learning_rate": 9.419147955465888e-05,
      "loss": 3.8298,
      "step": 830
    },
    {
      "epoch": 0.17,
      "grad_norm": 1.513197660446167,
      "learning_rate": 9.411324289293832e-05,
      "loss": 3.8476,
      "step": 835
    },
    {
      "epoch": 0.17,
      "grad_norm": 1.422322154045105,
      "learning_rate": 9.403451579545859e-05,
      "loss": 3.8966,
      "step": 840
    },
    {
      "epoch": 0.18,
      "grad_norm": 1.3686108589172363,
      "learning_rate": 9.395529913748025e-05,
      "loss": 3.8592,
      "step": 845
    },
    {
      "epoch": 0.18,
      "grad_norm": 1.4392951726913452,
      "learning_rate": 9.387559379970672e-05,
      "loss": 3.7981,
      "step": 850
    },
    {
      "epoch": 0.18,
      "grad_norm": 1.14130699634552,
      "learning_rate": 9.379540066827431e-05,
      "loss": 3.8596,
      "step": 855
    },
    {
      "epoch": 0.18,
      "grad_norm": 1.2033264636993408,
      "learning_rate": 9.371472063474248e-05,
      "loss": 3.7935,
      "step": 860
    },
    {
      "epoch": 0.18,
      "grad_norm": 1.5537389516830444,
      "learning_rate": 9.363355459608394e-05,
      "loss": 3.7443,
      "step": 865
    },
    {
      "epoch": 0.18,
      "grad_norm": 1.5609865188598633,
      "learning_rate": 9.355190345467457e-05,
      "loss": 3.7242,
      "step": 870
    },
    {
      "epoch": 0.18,
      "grad_norm": 1.5546597242355347,
      "learning_rate": 9.346976811828352e-05,
      "loss": 3.8129,
      "step": 875
    },
    {
      "epoch": 0.18,
      "grad_norm": 1.34921395778656,
      "learning_rate": 9.338714950006297e-05,
      "loss": 3.8219,
      "step": 880
    },
    {
      "epoch": 0.18,
      "grad_norm": 1.2954776287078857,
      "learning_rate": 9.330404851853817e-05,
      "loss": 3.7279,
      "step": 885
    },
    {
      "epoch": 0.18,
      "grad_norm": 1.467391848564148,
      "learning_rate": 9.3220466097597e-05,
      "loss": 3.7091,
      "step": 890
    },
    {
      "epoch": 0.19,
      "grad_norm": 1.4651292562484741,
      "learning_rate": 9.313640316647991e-05,
      "loss": 3.8012,
      "step": 895
    },
    {
      "epoch": 0.19,
      "grad_norm": 1.6071767807006836,
      "learning_rate": 9.305186065976945e-05,
      "loss": 3.6903,
      "step": 900
    },
    {
      "epoch": 0.19,
      "grad_norm": 1.333256721496582,
      "learning_rate": 9.296683951737993e-05,
      "loss": 3.6845,
      "step": 905
    },
    {
      "epoch": 0.19,
      "grad_norm": 1.358489990234375,
      "learning_rate": 9.288134068454697e-05,
      "loss": 3.7423,
      "step": 910
    },
    {
      "epoch": 0.19,
      "grad_norm": 1.4946008920669556,
      "learning_rate": 9.2795365111817e-05,
      "loss": 3.6715,
      "step": 915
    },
    {
      "epoch": 0.19,
      "grad_norm": 1.2223352193832397,
      "learning_rate": 9.270891375503665e-05,
      "loss": 3.7082,
      "step": 920
    },
    {
      "epoch": 0.19,
      "grad_norm": 1.1607824563980103,
      "learning_rate": 9.262198757534218e-05,
      "loss": 3.7073,
      "step": 925
    },
    {
      "epoch": 0.19,
      "grad_norm": 1.3681381940841675,
      "learning_rate": 9.253458753914874e-05,
      "loss": 3.742,
      "step": 930
    },
    {
      "epoch": 0.19,
      "grad_norm": 1.3831149339675903,
      "learning_rate": 9.244671461813969e-05,
      "loss": 3.6545,
      "step": 935
    },
    {
      "epoch": 0.2,
      "grad_norm": 1.584140419960022,
      "learning_rate": 9.235836978925572e-05,
      "loss": 3.7261,
      "step": 940
    },
    {
      "epoch": 0.2,
      "grad_norm": 1.2573814392089844,
      "learning_rate": 9.226955403468406e-05,
      "loss": 3.6429,
      "step": 945
    },
    {
      "epoch": 0.2,
      "grad_norm": 1.334175944328308,
      "learning_rate": 9.21802683418475e-05,
      "loss": 3.6895,
      "step": 950
    },
    {
      "epoch": 0.2,
      "grad_norm": 1.3122330904006958,
      "learning_rate": 9.209051370339347e-05,
      "loss": 3.7009,
      "step": 955
    },
    {
      "epoch": 0.2,
      "grad_norm": 1.355034351348877,
      "learning_rate": 9.200029111718295e-05,
      "loss": 3.5915,
      "step": 960
    },
    {
      "epoch": 0.2,
      "grad_norm": 1.214666485786438,
      "learning_rate": 9.190960158627941e-05,
      "loss": 3.662,
      "step": 965
    },
    {
      "epoch": 0.2,
      "grad_norm": 1.1528154611587524,
      "learning_rate": 9.181844611893766e-05,
      "loss": 3.6683,
      "step": 970
    },
    {
      "epoch": 0.2,
      "grad_norm": 1.203173041343689,
      "learning_rate": 9.172682572859261e-05,
      "loss": 3.6648,
      "step": 975
    },
    {
      "epoch": 0.2,
      "grad_norm": 1.1646435260772705,
      "learning_rate": 9.163474143384806e-05,
      "loss": 3.6553,
      "step": 980
    },
    {
      "epoch": 0.2,
      "grad_norm": 1.1690576076507568,
      "learning_rate": 9.154219425846528e-05,
      "loss": 3.6823,
      "step": 985
    },
    {
      "epoch": 0.21,
      "grad_norm": 1.475153923034668,
      "learning_rate": 9.144918523135175e-05,
      "loss": 3.6068,
      "step": 990
    },
    {
      "epoch": 0.21,
      "grad_norm": 1.4765112400054932,
      "learning_rate": 9.13557153865496e-05,
      "loss": 3.681,
      "step": 995
    },
    {
      "epoch": 0.21,
      "grad_norm": 1.2363364696502686,
      "learning_rate": 9.12617857632242e-05,
      "loss": 3.6482,
      "step": 1000
    },
    {
      "epoch": 0.21,
      "grad_norm": 1.7234574556350708,
      "learning_rate": 9.116739740565259e-05,
      "loss": 3.5581,
      "step": 1005
    },
    {
      "epoch": 0.21,
      "grad_norm": 1.4000308513641357,
      "learning_rate": 9.107255136321184e-05,
      "loss": 3.7079,
      "step": 1010
    },
    {
      "epoch": 0.21,
      "grad_norm": 1.4503638744354248,
      "learning_rate": 9.09772486903674e-05,
      "loss": 3.695,
      "step": 1015
    },
    {
      "epoch": 0.21,
      "grad_norm": 1.298401117324829,
      "learning_rate": 9.08814904466614e-05,
      "loss": 3.6224,
      "step": 1020
    },
    {
      "epoch": 0.21,
      "grad_norm": 1.1515898704528809,
      "learning_rate": 9.078527769670085e-05,
      "loss": 3.6393,
      "step": 1025
    },
    {
      "epoch": 0.21,
      "grad_norm": 1.218999981880188,
      "learning_rate": 9.068861151014575e-05,
      "loss": 3.6757,
      "step": 1030
    },
    {
      "epoch": 0.22,
      "grad_norm": 1.216417670249939,
      "learning_rate": 9.05914929616973e-05,
      "loss": 3.5981,
      "step": 1035
    },
    {
      "epoch": 0.22,
      "grad_norm": 1.0293338298797607,
      "learning_rate": 9.04939231310859e-05,
      "loss": 3.5595,
      "step": 1040
    },
    {
      "epoch": 0.22,
      "grad_norm": 1.1071323156356812,
      "learning_rate": 9.039590310305914e-05,
      "loss": 3.7037,
      "step": 1045
    },
    {
      "epoch": 0.22,
      "grad_norm": 1.1778522729873657,
      "learning_rate": 9.029743396736974e-05,
      "loss": 3.6686,
      "step": 1050
    },
    {
      "epoch": 0.22,
      "grad_norm": 1.1828972101211548,
      "learning_rate": 9.019851681876348e-05,
      "loss": 3.6933,
      "step": 1055
    },
    {
      "epoch": 0.22,
      "grad_norm": 1.0492703914642334,
      "learning_rate": 9.009915275696693e-05,
      "loss": 3.6414,
      "step": 1060
    },
    {
      "epoch": 0.22,
      "grad_norm": 1.3256988525390625,
      "learning_rate": 8.999934288667534e-05,
      "loss": 3.6565,
      "step": 1065
    },
    {
      "epoch": 0.22,
      "grad_norm": 1.5010437965393066,
      "learning_rate": 8.989908831754028e-05,
      "loss": 3.5857,
      "step": 1070
    },
    {
      "epoch": 0.22,
      "grad_norm": 1.3305867910385132,
      "learning_rate": 8.979839016415735e-05,
      "loss": 3.5996,
      "step": 1075
    },
    {
      "epoch": 0.22,
      "grad_norm": 1.219081163406372,
      "learning_rate": 8.969724954605373e-05,
      "loss": 3.5765,
      "step": 1080
    },
    {
      "epoch": 0.23,
      "grad_norm": 1.2244614362716675,
      "learning_rate": 8.959566758767581e-05,
      "loss": 3.5575,
      "step": 1085
    },
    {
      "epoch": 0.23,
      "grad_norm": 1.2763677835464478,
      "learning_rate": 8.949364541837661e-05,
      "loss": 3.6174,
      "step": 1090
    },
    {
      "epoch": 0.23,
      "grad_norm": 1.2399697303771973,
      "learning_rate": 8.939118417240329e-05,
      "loss": 3.632,
      "step": 1095
    },
    {
      "epoch": 0.23,
      "grad_norm": 1.4178550243377686,
      "learning_rate": 8.92882849888845e-05,
      "loss": 3.5089,
      "step": 1100
    },
    {
      "epoch": 0.23,
      "grad_norm": 1.2364768981933594,
      "learning_rate": 8.918494901181773e-05,
      "loss": 3.6284,
      "step": 1105
    },
    {
      "epoch": 0.23,
      "grad_norm": 1.2779579162597656,
      "learning_rate": 8.908117739005659e-05,
      "loss": 3.5869,
      "step": 1110
    },
    {
      "epoch": 0.23,
      "grad_norm": 1.045442819595337,
      "learning_rate": 8.897697127729805e-05,
      "loss": 3.6054,
      "step": 1115
    },
    {
      "epoch": 0.23,
      "grad_norm": 1.0831084251403809,
      "learning_rate": 8.887233183206957e-05,
      "loss": 3.6177,
      "step": 1120
    },
    {
      "epoch": 0.23,
      "grad_norm": 1.090774416923523,
      "learning_rate": 8.876726021771627e-05,
      "loss": 3.5378,
      "step": 1125
    },
    {
      "epoch": 0.23,
      "grad_norm": 1.1567001342773438,
      "learning_rate": 8.866175760238798e-05,
      "loss": 3.5421,
      "step": 1130
    },
    {
      "epoch": 0.24,
      "grad_norm": 1.2113406658172607,
      "learning_rate": 8.855582515902625e-05,
      "loss": 3.5202,
      "step": 1135
    },
    {
      "epoch": 0.24,
      "grad_norm": 1.0961172580718994,
      "learning_rate": 8.844946406535131e-05,
      "loss": 3.4994,
      "step": 1140
    },
    {
      "epoch": 0.24,
      "grad_norm": 1.2077534198760986,
      "learning_rate": 8.834267550384893e-05,
      "loss": 3.5358,
      "step": 1145
    },
    {
      "epoch": 0.24,
      "grad_norm": 1.1414586305618286,
      "learning_rate": 8.823546066175741e-05,
      "loss": 3.616,
      "step": 1150
    },
    {
      "epoch": 0.24,
      "grad_norm": 1.1577489376068115,
      "learning_rate": 8.81278207310542e-05,
      "loss": 3.5759,
      "step": 1155
    },
    {
      "epoch": 0.24,
      "grad_norm": 1.2198580503463745,
      "learning_rate": 8.801975690844278e-05,
      "loss": 3.5518,
      "step": 1160
    },
    {
      "epoch": 0.24,
      "grad_norm": 1.0149977207183838,
      "learning_rate": 8.791127039533934e-05,
      "loss": 3.5234,
      "step": 1165
    },
    {
      "epoch": 0.24,
      "grad_norm": 1.218048095703125,
      "learning_rate": 8.780236239785935e-05,
      "loss": 3.5177,
      "step": 1170
    },
    {
      "epoch": 0.24,
      "grad_norm": 1.1406141519546509,
      "learning_rate": 8.76930341268042e-05,
      "loss": 3.5053,
      "step": 1175
    },
    {
      "epoch": 0.25,
      "grad_norm": 1.0759096145629883,
      "learning_rate": 8.758328679764776e-05,
      "loss": 3.5222,
      "step": 1180
    },
    {
      "epoch": 0.25,
      "grad_norm": 1.0843647718429565,
      "learning_rate": 8.747312163052284e-05,
      "loss": 3.4956,
      "step": 1185
    },
    {
      "epoch": 0.25,
      "grad_norm": 1.1581239700317383,
      "learning_rate": 8.736253985020761e-05,
      "loss": 3.5296,
      "step": 1190
    },
    {
      "epoch": 0.25,
      "grad_norm": 1.0814120769500732,
      "learning_rate": 8.725154268611203e-05,
      "loss": 3.5113,
      "step": 1195
    },
    {
      "epoch": 0.25,
      "grad_norm": 1.1096363067626953,
      "learning_rate": 8.714013137226411e-05,
      "loss": 3.5051,
      "step": 1200
    },
    {
      "epoch": 0.25,
      "grad_norm": 1.064050555229187,
      "learning_rate": 8.702830714729628e-05,
      "loss": 3.4517,
      "step": 1205
    },
    {
      "epoch": 0.25,
      "grad_norm": 1.1795005798339844,
      "learning_rate": 8.691607125443153e-05,
      "loss": 3.5231,
      "step": 1210
    },
    {
      "epoch": 0.25,
      "grad_norm": 1.0425444841384888,
      "learning_rate": 8.680342494146967e-05,
      "loss": 3.4568,
      "step": 1215
    },
    {
      "epoch": 0.25,
      "grad_norm": 1.016908049583435,
      "learning_rate": 8.66903694607734e-05,
      "loss": 3.4431,
      "step": 1220
    },
    {
      "epoch": 0.25,
      "grad_norm": 1.0485405921936035,
      "learning_rate": 8.65769060692544e-05,
      "loss": 3.4538,
      "step": 1225
    },
    {
      "epoch": 0.26,
      "grad_norm": 1.0376747846603394,
      "learning_rate": 8.646303602835936e-05,
      "loss": 3.575,
      "step": 1230
    },
    {
      "epoch": 0.26,
      "grad_norm": 1.0116878747940063,
      "learning_rate": 8.634876060405597e-05,
      "loss": 3.5143,
      "step": 1235
    },
    {
      "epoch": 0.26,
      "grad_norm": 1.0038654804229736,
      "learning_rate": 8.623408106681884e-05,
      "loss": 3.529,
      "step": 1240
    },
    {
      "epoch": 0.26,
      "grad_norm": 1.028887152671814,
      "learning_rate": 8.611899869161535e-05,
      "loss": 3.526,
      "step": 1245
    },
    {
      "epoch": 0.26,
      "grad_norm": 0.9845506548881531,
      "learning_rate": 8.600351475789147e-05,
      "loss": 3.3989,
      "step": 1250
    },
    {
      "epoch": 0.26,
      "grad_norm": 1.0417760610580444,
      "learning_rate": 8.588763054955764e-05,
      "loss": 3.4547,
      "step": 1255
    },
    {
      "epoch": 0.26,
      "grad_norm": 1.1159653663635254,
      "learning_rate": 8.57713473549743e-05,
      "loss": 3.4336,
      "step": 1260
    },
    {
      "epoch": 0.26,
      "grad_norm": 1.024071455001831,
      "learning_rate": 8.565466646693778e-05,
      "loss": 3.4789,
      "step": 1265
    },
    {
      "epoch": 0.26,
      "grad_norm": 1.0769356489181519,
      "learning_rate": 8.553758918266578e-05,
      "loss": 3.4477,
      "step": 1270
    },
    {
      "epoch": 0.27,
      "grad_norm": 0.9152624607086182,
      "learning_rate": 8.5420116803783e-05,
      "loss": 3.458,
      "step": 1275
    },
    {
      "epoch": 0.27,
      "grad_norm": 0.9252551198005676,
      "learning_rate": 8.530225063630668e-05,
      "loss": 3.4631,
      "step": 1280
    },
    {
      "epoch": 0.27,
      "grad_norm": 0.9853327870368958,
      "learning_rate": 8.518399199063205e-05,
      "loss": 3.4846,
      "step": 1285
    },
    {
      "epoch": 0.27,
      "grad_norm": 1.0301434993743896,
      "learning_rate": 8.50653421815178e-05,
      "loss": 3.5413,
      "step": 1290
    },
    {
      "epoch": 0.27,
      "grad_norm": 0.9440063238143921,
      "learning_rate": 8.494630252807138e-05,
      "loss": 3.421,
      "step": 1295
    },
    {
      "epoch": 0.27,
      "grad_norm": 0.968291699886322,
      "learning_rate": 8.482687435373449e-05,
      "loss": 3.4441,
      "step": 1300
    },
    {
      "epoch": 0.27,
      "grad_norm": 1.6729166507720947,
      "learning_rate": 8.470705898626817e-05,
      "loss": 3.3822,
      "step": 1305
    },
    {
      "epoch": 0.27,
      "grad_norm": 1.0198301076889038,
      "learning_rate": 8.458685775773822e-05,
      "loss": 3.4128,
      "step": 1310
    },
    {
      "epoch": 0.27,
      "grad_norm": 1.0199130773544312,
      "learning_rate": 8.446627200450025e-05,
      "loss": 3.4023,
      "step": 1315
    },
    {
      "epoch": 0.27,
      "grad_norm": 0.8610997200012207,
      "learning_rate": 8.434530306718493e-05,
      "loss": 3.3779,
      "step": 1320
    },
    {
      "epoch": 0.28,
      "grad_norm": 0.9561224579811096,
      "learning_rate": 8.4223952290683e-05,
      "loss": 3.4433,
      "step": 1325
    },
    {
      "epoch": 0.28,
      "grad_norm": 1.0564466714859009,
      "learning_rate": 8.41022210241304e-05,
      "loss": 3.393,
      "step": 1330
    },
    {
      "epoch": 0.28,
      "grad_norm": 1.0251822471618652,
      "learning_rate": 8.398011062089316e-05,
      "loss": 3.4585,
      "step": 1335
    },
    {
      "epoch": 0.28,
      "grad_norm": 1.1124573945999146,
      "learning_rate": 8.385762243855249e-05,
      "loss": 3.3761,
      "step": 1340
    },
    {
      "epoch": 0.28,
      "grad_norm": 1.0452122688293457,
      "learning_rate": 8.373475783888958e-05,
      "loss": 3.4468,
      "step": 1345
    },
    {
      "epoch": 0.28,
      "grad_norm": 1.1632424592971802,
      "learning_rate": 8.36115181878705e-05,
      "loss": 3.393,
      "step": 1350
    },
    {
      "epoch": 0.28,
      "grad_norm": 1.0344336032867432,
      "learning_rate": 8.348790485563101e-05,
      "loss": 3.4242,
      "step": 1355
    },
    {
      "epoch": 0.28,
      "grad_norm": 0.9477317929267883,
      "learning_rate": 8.336391921646134e-05,
      "loss": 3.313,
      "step": 1360
    },
    {
      "epoch": 0.28,
      "grad_norm": 0.9149269461631775,
      "learning_rate": 8.323956264879089e-05,
      "loss": 3.4397,
      "step": 1365
    },
    {
      "epoch": 0.28,
      "grad_norm": 0.8402585387229919,
      "learning_rate": 8.311483653517294e-05,
      "loss": 3.3697,
      "step": 1370
    },
    {
      "epoch": 0.29,
      "grad_norm": 0.8762050867080688,
      "learning_rate": 8.298974226226919e-05,
      "loss": 3.41,
      "step": 1375
    },
    {
      "epoch": 0.29,
      "grad_norm": 0.914233922958374,
      "learning_rate": 8.28642812208345e-05,
      "loss": 3.3886,
      "step": 1380
    },
    {
      "epoch": 0.29,
      "grad_norm": 0.9344747066497803,
      "learning_rate": 8.273845480570123e-05,
      "loss": 3.4337,
      "step": 1385
    },
    {
      "epoch": 0.29,
      "grad_norm": 0.951545774936676,
      "learning_rate": 8.26122644157639e-05,
      "loss": 3.362,
      "step": 1390
    },
    {
      "epoch": 0.29,
      "grad_norm": 0.9447149634361267,
      "learning_rate": 8.248571145396362e-05,
      "loss": 3.3801,
      "step": 1395
    },
    {
      "epoch": 0.29,
      "grad_norm": 1.0801563262939453,
      "learning_rate": 8.235879732727236e-05,
      "loss": 3.3573,
      "step": 1400
    },
    {
      "epoch": 0.29,
      "grad_norm": 0.9000726938247681,
      "learning_rate": 8.223152344667745e-05,
      "loss": 3.4659,
      "step": 1405
    },
    {
      "epoch": 0.29,
      "grad_norm": 1.0399258136749268,
      "learning_rate": 8.21038912271658e-05,
      "loss": 3.3936,
      "step": 1410
    },
    {
      "epoch": 0.29,
      "grad_norm": 0.8596981763839722,
      "learning_rate": 8.197590208770824e-05,
      "loss": 3.4114,
      "step": 1415
    },
    {
      "epoch": 0.3,
      "grad_norm": 0.9778981804847717,
      "learning_rate": 8.184755745124371e-05,
      "loss": 3.3424,
      "step": 1420
    },
    {
      "epoch": 0.3,
      "grad_norm": 0.8886977434158325,
      "learning_rate": 8.171885874466342e-05,
      "loss": 3.448,
      "step": 1425
    },
    {
      "epoch": 0.3,
      "grad_norm": 1.0350151062011719,
      "learning_rate": 8.158980739879507e-05,
      "loss": 3.365,
      "step": 1430
    },
    {
      "epoch": 0.3,
      "grad_norm": 1.008541464805603,
      "learning_rate": 8.146040484838677e-05,
      "loss": 3.3698,
      "step": 1435
    },
    {
      "epoch": 0.3,
      "grad_norm": 0.9444683194160461,
      "learning_rate": 8.133065253209132e-05,
      "loss": 3.4307,
      "step": 1440
    },
    {
      "epoch": 0.3,
      "grad_norm": 0.9216312170028687,
      "learning_rate": 8.120055189245e-05,
      "loss": 3.3684,
      "step": 1445
    },
    {
      "epoch": 0.3,
      "grad_norm": 0.8898837566375732,
      "learning_rate": 8.10701043758767e-05,
      "loss": 3.2979,
      "step": 1450
    },
    {
      "epoch": 0.3,
      "grad_norm": 0.9232613444328308,
      "learning_rate": 8.093931143264174e-05,
      "loss": 3.4852,
      "step": 1455
    },
    {
      "epoch": 0.3,
      "grad_norm": 0.9185067415237427,
      "learning_rate": 8.080817451685576e-05,
      "loss": 3.4286,
      "step": 1460
    },
    {
      "epoch": 0.3,
      "grad_norm": 0.9076113104820251,
      "learning_rate": 8.067669508645356e-05,
      "loss": 3.4037,
      "step": 1465
    },
    {
      "epoch": 0.31,
      "grad_norm": 0.8780031204223633,
      "learning_rate": 8.054487460317797e-05,
      "loss": 3.4238,
      "step": 1470
    },
    {
      "epoch": 0.31,
      "grad_norm": 0.9289349913597107,
      "learning_rate": 8.041271453256345e-05,
      "loss": 3.4431,
      "step": 1475
    },
    {
      "epoch": 0.31,
      "grad_norm": 0.993030846118927,
      "learning_rate": 8.02802163439199e-05,
      "loss": 3.3544,
      "step": 1480
    },
    {
      "epoch": 0.31,
      "grad_norm": 0.9764955043792725,
      "learning_rate": 8.01473815103163e-05,
      "loss": 3.2897,
      "step": 1485
    },
    {
      "epoch": 0.31,
      "grad_norm": 1.0672012567520142,
      "learning_rate": 8.001421150856434e-05,
      "loss": 3.3286,
      "step": 1490
    },
    {
      "epoch": 0.31,
      "grad_norm": 0.9533431529998779,
      "learning_rate": 7.988070781920197e-05,
      "loss": 3.2994,
      "step": 1495
    },
    {
      "epoch": 0.31,
      "grad_norm": 1.0188872814178467,
      "learning_rate": 7.9746871926477e-05,
      "loss": 3.3446,
      "step": 1500
    },
    {
      "epoch": 0.31,
      "grad_norm": 0.9545197486877441,
      "learning_rate": 7.961270531833052e-05,
      "loss": 3.3431,
      "step": 1505
    },
    {
      "epoch": 0.31,
      "grad_norm": 1.0579429864883423,
      "learning_rate": 7.947820948638045e-05,
      "loss": 3.3189,
      "step": 1510
    },
    {
      "epoch": 0.31,
      "grad_norm": 0.9583922624588013,
      "learning_rate": 7.934338592590486e-05,
      "loss": 3.3625,
      "step": 1515
    },
    {
      "epoch": 0.32,
      "grad_norm": 1.020835518836975,
      "learning_rate": 7.92082361358254e-05,
      "loss": 3.2876,
      "step": 1520
    },
    {
      "epoch": 0.32,
      "grad_norm": 0.9083346724510193,
      "learning_rate": 7.907276161869065e-05,
      "loss": 3.2336,
      "step": 1525
    },
    {
      "epoch": 0.32,
      "grad_norm": 1.009750247001648,
      "learning_rate": 7.893696388065936e-05,
      "loss": 3.383,
      "step": 1530
    },
    {
      "epoch": 0.32,
      "grad_norm": 0.9893795251846313,
      "learning_rate": 7.88008444314838e-05,
      "loss": 3.2978,
      "step": 1535
    },
    {
      "epoch": 0.32,
      "grad_norm": 0.8603305816650391,
      "learning_rate": 7.866440478449283e-05,
      "loss": 3.3148,
      "step": 1540
    },
    {
      "epoch": 0.32,
      "grad_norm": 0.8487975597381592,
      "learning_rate": 7.852764645657522e-05,
      "loss": 3.2816,
      "step": 1545
    },
    {
      "epoch": 0.32,
      "grad_norm": 0.8893052935600281,
      "learning_rate": 7.839057096816271e-05,
      "loss": 3.3576,
      "step": 1550
    },
    {
      "epoch": 0.32,
      "grad_norm": 0.8963512182235718,
      "learning_rate": 7.82531798432131e-05,
      "loss": 3.4338,
      "step": 1555
    },
    {
      "epoch": 0.32,
      "grad_norm": 0.856163501739502,
      "learning_rate": 7.811547460919333e-05,
      "loss": 3.2469,
      "step": 1560
    },
    {
      "epoch": 0.33,
      "grad_norm": 0.8650716543197632,
      "learning_rate": 7.797745679706254e-05,
      "loss": 3.3202,
      "step": 1565
    },
    {
      "epoch": 0.33,
      "grad_norm": 0.9630741477012634,
      "learning_rate": 7.783912794125496e-05,
      "loss": 3.237,
      "step": 1570
    },
    {
      "epoch": 0.33,
      "grad_norm": 0.8688840270042419,
      "learning_rate": 7.770048957966291e-05,
      "loss": 3.365,
      "step": 1575
    },
    {
      "epoch": 0.33,
      "grad_norm": 0.9968611001968384,
      "learning_rate": 7.756154325361967e-05,
      "loss": 3.3255,
      "step": 1580
    },
    {
      "epoch": 0.33,
      "grad_norm": 0.9145302176475525,
      "learning_rate": 7.74222905078824e-05,
      "loss": 3.1962,
      "step": 1585
    },
    {
      "epoch": 0.33,
      "grad_norm": 0.9410063624382019,
      "learning_rate": 7.728273289061489e-05,
      "loss": 3.3402,
      "step": 1590
    },
    {
      "epoch": 0.33,
      "grad_norm": 0.9378871321678162,
      "learning_rate": 7.714287195337044e-05,
      "loss": 3.3085,
      "step": 1595
    },
    {
      "epoch": 0.33,
      "grad_norm": 0.9802606105804443,
      "learning_rate": 7.700270925107448e-05,
      "loss": 3.3233,
      "step": 1600
    },
    {
      "epoch": 0.33,
      "grad_norm": 0.8993830680847168,
      "learning_rate": 7.686224634200742e-05,
      "loss": 3.3775,
      "step": 1605
    },
    {
      "epoch": 0.33,
      "grad_norm": 0.8461673259735107,
      "learning_rate": 7.672148478778722e-05,
      "loss": 3.311,
      "step": 1610
    },
    {
      "epoch": 0.34,
      "grad_norm": 0.9666621685028076,
      "learning_rate": 7.658042615335212e-05,
      "loss": 3.2574,
      "step": 1615
    },
    {
      "epoch": 0.34,
      "grad_norm": 0.8989726305007935,
      "learning_rate": 7.643907200694318e-05,
      "loss": 3.1906,
      "step": 1620
    },
    {
      "epoch": 0.34,
      "grad_norm": 0.8429153561592102,
      "learning_rate": 7.629742392008684e-05,
      "loss": 3.3073,
      "step": 1625
    },
    {
      "epoch": 0.34,
      "grad_norm": 0.9168885350227356,
      "learning_rate": 7.615548346757749e-05,
      "loss": 3.284,
      "step": 1630
    },
    {
      "epoch": 0.34,
      "grad_norm": 0.8547226786613464,
      "learning_rate": 7.60132522274599e-05,
      "loss": 3.2713,
      "step": 1635
    },
    {
      "epoch": 0.34,
      "grad_norm": 0.8922094106674194,
      "learning_rate": 7.587073178101178e-05,
      "loss": 3.2508,
      "step": 1640
    },
    {
      "epoch": 0.34,
      "grad_norm": 0.8666452169418335,
      "learning_rate": 7.572792371272609e-05,
      "loss": 3.3078,
      "step": 1645
    },
    {
      "epoch": 0.34,
      "grad_norm": 0.8513102531433105,
      "learning_rate": 7.55848296102935e-05,
      "loss": 3.3357,
      "step": 1650
    },
    {
      "epoch": 0.34,
      "grad_norm": 0.9609710574150085,
      "learning_rate": 7.544145106458465e-05,
      "loss": 3.1825,
      "step": 1655
    },
    {
      "epoch": 0.35,
      "grad_norm": 0.964782178401947,
      "learning_rate": 7.529778966963259e-05,
      "loss": 3.2379,
      "step": 1660
    },
    {
      "epoch": 0.35,
      "grad_norm": 0.922019898891449,
      "learning_rate": 7.515384702261496e-05,
      "loss": 3.3127,
      "step": 1665
    },
    {
      "epoch": 0.35,
      "grad_norm": 0.975131094455719,
      "learning_rate": 7.500962472383627e-05,
      "loss": 3.2664,
      "step": 1670
    },
    {
      "epoch": 0.35,
      "grad_norm": 0.7999668121337891,
      "learning_rate": 7.486512437671011e-05,
      "loss": 3.3116,
      "step": 1675
    },
    {
      "epoch": 0.35,
      "grad_norm": 0.846670389175415,
      "learning_rate": 7.472034758774128e-05,
      "loss": 3.303,
      "step": 1680
    },
    {
      "epoch": 0.35,
      "grad_norm": 0.7883259654045105,
      "learning_rate": 7.457529596650797e-05,
      "loss": 3.3054,
      "step": 1685
    },
    {
      "epoch": 0.35,
      "grad_norm": 0.8919851183891296,
      "learning_rate": 7.442997112564392e-05,
      "loss": 3.2546,
      "step": 1690
    },
    {
      "epoch": 0.35,
      "grad_norm": 0.8452056646347046,
      "learning_rate": 7.428437468082037e-05,
      "loss": 3.2351,
      "step": 1695
    },
    {
      "epoch": 0.35,
      "grad_norm": 0.8352906703948975,
      "learning_rate": 7.413850825072817e-05,
      "loss": 3.2604,
      "step": 1700
    },
    {
      "epoch": 0.35,
      "grad_norm": 0.820219874382019,
      "learning_rate": 7.39923734570598e-05,
      "loss": 3.3134,
      "step": 1705
    },
    {
      "epoch": 0.36,
      "grad_norm": 0.8693707585334778,
      "learning_rate": 7.384597192449126e-05,
      "loss": 3.2686,
      "step": 1710
    },
    {
      "epoch": 0.36,
      "grad_norm": 0.8277323246002197,
      "learning_rate": 7.369930528066412e-05,
      "loss": 3.2213,
      "step": 1715
    },
    {
      "epoch": 0.36,
      "grad_norm": 0.9113200902938843,
      "learning_rate": 7.355237515616732e-05,
      "loss": 3.2243,
      "step": 1720
    },
    {
      "epoch": 0.36,
      "grad_norm": 0.8051711916923523,
      "learning_rate": 7.340518318451914e-05,
      "loss": 3.295,
      "step": 1725
    },
    {
      "epoch": 0.36,
      "grad_norm": 0.7537018656730652,
      "learning_rate": 7.325773100214893e-05,
      "loss": 3.3072,
      "step": 1730
    },
    {
      "epoch": 0.36,
      "grad_norm": 0.9294073581695557,
      "learning_rate": 7.311002024837899e-05,
      "loss": 3.2018,
      "step": 1735
    },
    {
      "epoch": 0.36,
      "grad_norm": 0.8529069423675537,
      "learning_rate": 7.296205256540633e-05,
      "loss": 3.2521,
      "step": 1740
    },
    {
      "epoch": 0.36,
      "grad_norm": 0.8739511966705322,
      "learning_rate": 7.281382959828443e-05,
      "loss": 3.2296,
      "step": 1745
    },
    {
      "epoch": 0.36,
      "grad_norm": 0.8794721364974976,
      "learning_rate": 7.26653529949049e-05,
      "loss": 3.2836,
      "step": 1750
    },
    {
      "epoch": 0.36,
      "grad_norm": 0.8752202987670898,
      "learning_rate": 7.25166244059792e-05,
      "loss": 3.1781,
      "step": 1755
    },
    {
      "epoch": 0.37,
      "grad_norm": 0.8442619442939758,
      "learning_rate": 7.236764548502029e-05,
      "loss": 3.2694,
      "step": 1760
    },
    {
      "epoch": 0.37,
      "grad_norm": 0.8436053395271301,
      "learning_rate": 7.221841788832421e-05,
      "loss": 3.2683,
      "step": 1765
    },
    {
      "epoch": 0.37,
      "grad_norm": 0.8704015612602234,
      "learning_rate": 7.206894327495173e-05,
      "loss": 3.2526,
      "step": 1770
    },
    {
      "epoch": 0.37,
      "grad_norm": 0.8538575768470764,
      "learning_rate": 7.191922330670982e-05,
      "loss": 3.2357,
      "step": 1775
    },
    {
      "epoch": 0.37,
      "grad_norm": 1.666717529296875,
      "learning_rate": 7.176925964813326e-05,
      "loss": 3.1943,
      "step": 1780
    },
    {
      "epoch": 0.37,
      "grad_norm": 0.9092399477958679,
      "learning_rate": 7.161905396646607e-05,
      "loss": 3.2465,
      "step": 1785
    },
    {
      "epoch": 0.37,
      "grad_norm": 0.8651665449142456,
      "learning_rate": 7.146860793164299e-05,
      "loss": 3.252,
      "step": 1790
    },
    {
      "epoch": 0.37,
      "grad_norm": 0.7834779620170593,
      "learning_rate": 7.131792321627098e-05,
      "loss": 3.172,
      "step": 1795
    },
    {
      "epoch": 0.37,
      "grad_norm": 0.8776272535324097,
      "learning_rate": 7.116700149561048e-05,
      "loss": 3.245,
      "step": 1800
    },
    {
      "epoch": 0.38,
      "grad_norm": 0.8585174679756165,
      "learning_rate": 7.101584444755696e-05,
      "loss": 3.1754,
      "step": 1805
    },
    {
      "epoch": 0.38,
      "grad_norm": 0.8505420684814453,
      "learning_rate": 7.086445375262212e-05,
      "loss": 3.2008,
      "step": 1810
    },
    {
      "epoch": 0.38,
      "grad_norm": 0.8369579315185547,
      "learning_rate": 7.071283109391528e-05,
      "loss": 3.1874,
      "step": 1815
    },
    {
      "epoch": 0.38,
      "grad_norm": 0.9212518930435181,
      "learning_rate": 7.056097815712466e-05,
      "loss": 3.2194,
      "step": 1820
    },
    {
      "epoch": 0.38,
      "grad_norm": 0.7666532397270203,
      "learning_rate": 7.040889663049862e-05,
      "loss": 3.2531,
      "step": 1825
    },
    {
      "epoch": 0.38,
      "grad_norm": 0.8339570760726929,
      "learning_rate": 7.025658820482693e-05,
      "loss": 3.2754,
      "step": 1830
    },
    {
      "epoch": 0.38,
      "grad_norm": 0.8046278357505798,
      "learning_rate": 7.010405457342192e-05,
      "loss": 3.1266,
      "step": 1835
    },
    {
      "epoch": 0.38,
      "grad_norm": 0.8452913761138916,
      "learning_rate": 6.995129743209967e-05,
      "loss": 3.1985,
      "step": 1840
    },
    {
      "epoch": 0.38,
      "grad_norm": 0.7863709330558777,
      "learning_rate": 6.97983184791612e-05,
      "loss": 3.1784,
      "step": 1845
    },
    {
      "epoch": 0.38,
      "grad_norm": 0.8239853382110596,
      "learning_rate": 6.964511941537355e-05,
      "loss": 3.2252,
      "step": 1850
    },
    {
      "epoch": 0.39,
      "grad_norm": 0.8609334230422974,
      "learning_rate": 6.949170194395083e-05,
      "loss": 3.2959,
      "step": 1855
    },
    {
      "epoch": 0.39,
      "grad_norm": 0.8544238805770874,
| "learning_rate": 6.933806777053536e-05, | |
| "loss": 3.2162, | |
| "step": 1860 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "grad_norm": 0.8129177093505859, | |
| "learning_rate": 6.918421860317872e-05, | |
| "loss": 3.1964, | |
| "step": 1865 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "grad_norm": 0.7961152791976929, | |
| "learning_rate": 6.903015615232263e-05, | |
| "loss": 3.1437, | |
| "step": 1870 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "grad_norm": 0.8143430352210999, | |
| "learning_rate": 6.887588213078012e-05, | |
| "loss": 3.1697, | |
| "step": 1875 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "grad_norm": 0.7973166108131409, | |
| "learning_rate": 6.87213982537163e-05, | |
| "loss": 3.2621, | |
| "step": 1880 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "grad_norm": 1.6866167783737183, | |
| "learning_rate": 6.856670623862943e-05, | |
| "loss": 3.2756, | |
| "step": 1885 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "grad_norm": 0.8205069303512573, | |
| "learning_rate": 6.841180780533179e-05, | |
| "loss": 3.1893, | |
| "step": 1890 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "grad_norm": 0.7730553150177002, | |
| "learning_rate": 6.82567046759305e-05, | |
| "loss": 3.1679, | |
| "step": 1895 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "grad_norm": 0.8687601089477539, | |
| "learning_rate": 6.810139857480844e-05, | |
| "loss": 3.2176, | |
| "step": 1900 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "grad_norm": 0.8935238122940063, | |
| "learning_rate": 6.794589122860509e-05, | |
| "loss": 3.108, | |
| "step": 1905 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "grad_norm": 0.9757806062698364, | |
| "learning_rate": 6.779018436619725e-05, | |
| "loss": 3.2123, | |
| "step": 1910 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "grad_norm": 0.8979082107543945, | |
| "learning_rate": 6.763427971867992e-05, | |
| "loss": 3.2193, | |
| "step": 1915 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "grad_norm": 0.8538148999214172, | |
| "learning_rate": 6.747817901934699e-05, | |
| "loss": 3.0894, | |
| "step": 1920 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "grad_norm": 1.4888596534729004, | |
| "learning_rate": 6.732188400367197e-05, | |
| "loss": 3.1901, | |
| "step": 1925 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "grad_norm": 0.8202100992202759, | |
| "learning_rate": 6.716539640928871e-05, | |
| "loss": 3.0955, | |
| "step": 1930 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "grad_norm": 2.123887777328491, | |
| "learning_rate": 6.70087179759721e-05, | |
| "loss": 3.1657, | |
| "step": 1935 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "grad_norm": 0.7670360803604126, | |
| "learning_rate": 6.685185044561874e-05, | |
| "loss": 3.1594, | |
| "step": 1940 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "grad_norm": 0.8550339937210083, | |
| "learning_rate": 6.669479556222747e-05, | |
| "loss": 3.2074, | |
| "step": 1945 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "grad_norm": 0.8555883765220642, | |
| "learning_rate": 6.653755507188013e-05, | |
| "loss": 3.164, | |
| "step": 1950 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "grad_norm": 0.8047502636909485, | |
| "learning_rate": 6.638013072272205e-05, | |
| "loss": 3.1951, | |
| "step": 1955 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "grad_norm": 0.8157230615615845, | |
| "learning_rate": 6.622252426494259e-05, | |
| "loss": 3.1255, | |
| "step": 1960 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "grad_norm": 0.8626261353492737, | |
| "learning_rate": 6.606473745075581e-05, | |
| "loss": 3.2235, | |
| "step": 1965 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "grad_norm": 0.8414022922515869, | |
| "learning_rate": 6.590677203438084e-05, | |
| "loss": 3.1986, | |
| "step": 1970 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "grad_norm": 0.7894745469093323, | |
| "learning_rate": 6.574862977202252e-05, | |
| "loss": 3.1613, | |
| "step": 1975 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "grad_norm": 0.752762496471405, | |
| "learning_rate": 6.559031242185174e-05, | |
| "loss": 3.1139, | |
| "step": 1980 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "grad_norm": 0.8221846222877502, | |
| "learning_rate": 6.543182174398597e-05, | |
| "loss": 3.0958, | |
| "step": 1985 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "grad_norm": 0.7978451251983643, | |
| "learning_rate": 6.52731595004697e-05, | |
| "loss": 3.0795, | |
| "step": 1990 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "grad_norm": 0.7774230241775513, | |
| "learning_rate": 6.51143274552548e-05, | |
| "loss": 3.1222, | |
| "step": 1995 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "grad_norm": 0.8245337605476379, | |
| "learning_rate": 6.495532737418098e-05, | |
| "loss": 3.129, | |
| "step": 2000 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "grad_norm": 0.9067891240119934, | |
| "learning_rate": 6.479616102495605e-05, | |
| "loss": 3.0893, | |
| "step": 2005 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "grad_norm": 0.7573225498199463, | |
| "learning_rate": 6.463683017713638e-05, | |
| "loss": 3.0931, | |
| "step": 2010 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "grad_norm": 0.8494120240211487, | |
| "learning_rate": 6.447733660210715e-05, | |
| "loss": 3.1648, | |
| "step": 2015 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "grad_norm": 0.8871794939041138, | |
| "learning_rate": 6.431768207306272e-05, | |
| "loss": 3.1761, | |
| "step": 2020 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "grad_norm": 0.7931782603263855, | |
| "learning_rate": 6.415786836498684e-05, | |
| "loss": 3.1153, | |
| "step": 2025 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "grad_norm": 0.7971853017807007, | |
| "learning_rate": 6.399789725463298e-05, | |
| "loss": 3.1404, | |
| "step": 2030 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "grad_norm": 0.8052401542663574, | |
| "learning_rate": 6.383777052050458e-05, | |
| "loss": 3.0916, | |
| "step": 2035 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "grad_norm": 0.7802801728248596, | |
| "learning_rate": 6.367748994283518e-05, | |
| "loss": 3.0932, | |
| "step": 2040 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "grad_norm": 0.8090404272079468, | |
| "learning_rate": 6.351705730356877e-05, | |
| "loss": 3.2049, | |
| "step": 2045 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "grad_norm": 0.7659323811531067, | |
| "learning_rate": 6.335647438633987e-05, | |
| "loss": 3.0673, | |
| "step": 2050 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "grad_norm": 0.8041963577270508, | |
| "learning_rate": 6.319574297645374e-05, | |
| "loss": 3.1267, | |
| "step": 2055 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "grad_norm": 0.8017270565032959, | |
| "learning_rate": 6.303486486086654e-05, | |
| "loss": 3.092, | |
| "step": 2060 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "grad_norm": 0.8264429569244385, | |
| "learning_rate": 6.287384182816546e-05, | |
| "loss": 3.0634, | |
| "step": 2065 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "grad_norm": 0.875568151473999, | |
| "learning_rate": 6.271267566854883e-05, | |
| "loss": 3.0476, | |
| "step": 2070 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "grad_norm": 0.8124510049819946, | |
| "learning_rate": 6.255136817380618e-05, | |
| "loss": 3.1748, | |
| "step": 2075 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "grad_norm": 0.7940680384635925, | |
| "learning_rate": 6.23899211372984e-05, | |
| "loss": 3.1026, | |
| "step": 2080 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "grad_norm": 0.7900389432907104, | |
| "learning_rate": 6.222833635393772e-05, | |
| "loss": 3.1854, | |
| "step": 2085 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "grad_norm": 0.8187170624732971, | |
| "learning_rate": 6.206661562016782e-05, | |
| "loss": 3.1887, | |
| "step": 2090 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "grad_norm": 0.7527194619178772, | |
| "learning_rate": 6.190476073394382e-05, | |
| "loss": 3.1655, | |
| "step": 2095 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "grad_norm": 1.4405195713043213, | |
| "learning_rate": 6.17427734947123e-05, | |
| "loss": 3.0655, | |
| "step": 2100 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "grad_norm": 0.7956041693687439, | |
| "learning_rate": 6.158065570339127e-05, | |
| "loss": 2.9956, | |
| "step": 2105 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "grad_norm": 0.7881782650947571, | |
| "learning_rate": 6.141840916235021e-05, | |
| "loss": 3.1321, | |
| "step": 2110 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "grad_norm": 0.8817310333251953, | |
| "learning_rate": 6.125603567539001e-05, | |
| "loss": 3.0608, | |
| "step": 2115 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "grad_norm": 0.7974894642829895, | |
| "learning_rate": 6.109353704772284e-05, | |
| "loss": 3.147, | |
| "step": 2120 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "grad_norm": 0.802848219871521, | |
| "learning_rate": 6.0930915085952164e-05, | |
| "loss": 3.1836, | |
| "step": 2125 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "grad_norm": 0.7838597893714905, | |
| "learning_rate": 6.076817159805267e-05, | |
| "loss": 3.06, | |
| "step": 2130 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "grad_norm": 0.8026391863822937, | |
| "learning_rate": 6.06053083933501e-05, | |
| "loss": 3.1263, | |
| "step": 2135 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "grad_norm": 0.7921155691146851, | |
| "learning_rate": 6.044232728250116e-05, | |
| "loss": 3.1318, | |
| "step": 2140 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "grad_norm": 0.984750509262085, | |
| "learning_rate": 6.027923007747339e-05, | |
| "loss": 3.1265, | |
| "step": 2145 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "grad_norm": 0.833447277545929, | |
| "learning_rate": 6.011601859152506e-05, | |
| "loss": 3.0889, | |
| "step": 2150 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "grad_norm": 0.7781099081039429, | |
| "learning_rate": 5.995269463918495e-05, | |
| "loss": 3.1421, | |
| "step": 2155 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "grad_norm": 0.7567946910858154, | |
| "learning_rate": 5.97892600362322e-05, | |
| "loss": 3.1243, | |
| "step": 2160 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "grad_norm": 0.7450053691864014, | |
| "learning_rate": 5.962571659967614e-05, | |
| "loss": 3.1198, | |
| "step": 2165 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "grad_norm": 0.7652369141578674, | |
| "learning_rate": 5.946206614773606e-05, | |
| "loss": 3.1175, | |
| "step": 2170 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "grad_norm": 0.7623115181922913, | |
| "learning_rate": 5.929831049982103e-05, | |
| "loss": 3.0896, | |
| "step": 2175 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "grad_norm": 0.836581289768219, | |
| "learning_rate": 5.9134451476509633e-05, | |
| "loss": 3.0571, | |
| "step": 2180 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "grad_norm": 0.7737760543823242, | |
| "learning_rate": 5.897049089952974e-05, | |
| "loss": 3.1586, | |
| "step": 2185 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "grad_norm": 0.8337840437889099, | |
| "learning_rate": 5.880643059173826e-05, | |
| "loss": 3.1192, | |
| "step": 2190 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "grad_norm": 0.8421131372451782, | |
| "learning_rate": 5.864227237710093e-05, | |
| "loss": 3.102, | |
| "step": 2195 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "grad_norm": 0.8616945147514343, | |
| "learning_rate": 5.847801808067189e-05, | |
| "loss": 3.1084, | |
| "step": 2200 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "grad_norm": 0.8136459589004517, | |
| "learning_rate": 5.831366952857357e-05, | |
| "loss": 3.0665, | |
| "step": 2205 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "grad_norm": 0.8897969722747803, | |
| "learning_rate": 5.814922854797622e-05, | |
| "loss": 3.1559, | |
| "step": 2210 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "grad_norm": 0.7970525026321411, | |
| "learning_rate": 5.798469696707775e-05, | |
| "loss": 3.1461, | |
| "step": 2215 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "grad_norm": 0.8494203090667725, | |
| "learning_rate": 5.782007661508331e-05, | |
| "loss": 3.0091, | |
| "step": 2220 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "grad_norm": 0.7653351426124573, | |
| "learning_rate": 5.765536932218495e-05, | |
| "loss": 3.0536, | |
| "step": 2225 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "grad_norm": 0.8680189251899719, | |
| "learning_rate": 5.7490576919541315e-05, | |
| "loss": 3.0907, | |
| "step": 2230 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "grad_norm": 0.8048584461212158, | |
| "learning_rate": 5.732570123925729e-05, | |
| "loss": 3.0949, | |
| "step": 2235 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "grad_norm": 0.7515243291854858, | |
| "learning_rate": 5.7160744114363593e-05, | |
| "loss": 3.1158, | |
| "step": 2240 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "grad_norm": 0.756842315196991, | |
| "learning_rate": 5.699570737879641e-05, | |
| "loss": 3.026, | |
| "step": 2245 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "grad_norm": 0.7633551359176636, | |
| "learning_rate": 5.683059286737702e-05, | |
| "loss": 3.1198, | |
| "step": 2250 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "grad_norm": 0.7871392369270325, | |
| "learning_rate": 5.666540241579139e-05, | |
| "loss": 3.0589, | |
| "step": 2255 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "grad_norm": 0.7806293964385986, | |
| "learning_rate": 5.6500137860569766e-05, | |
| "loss": 3.1131, | |
| "step": 2260 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "grad_norm": 0.7823066711425781, | |
| "learning_rate": 5.633480103906624e-05, | |
| "loss": 3.0632, | |
| "step": 2265 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "grad_norm": 0.8392202258110046, | |
| "learning_rate": 5.616939378943834e-05, | |
| "loss": 3.1254, | |
| "step": 2270 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "grad_norm": 0.7988219261169434, | |
| "learning_rate": 5.6003917950626595e-05, | |
| "loss": 3.0534, | |
| "step": 2275 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "grad_norm": 0.7629736065864563, | |
| "learning_rate": 5.583837536233407e-05, | |
| "loss": 3.0923, | |
| "step": 2280 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "grad_norm": 0.7717956900596619, | |
| "learning_rate": 5.567276786500596e-05, | |
| "loss": 3.1394, | |
| "step": 2285 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "grad_norm": 0.7379651069641113, | |
| "learning_rate": 5.5507097299809054e-05, | |
| "loss": 3.0808, | |
| "step": 2290 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "grad_norm": 0.783997118473053, | |
| "learning_rate": 5.534136550861133e-05, | |
| "loss": 3.1233, | |
| "step": 2295 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "grad_norm": 0.7742591500282288, | |
| "learning_rate": 5.5175574333961465e-05, | |
| "loss": 3.1363, | |
| "step": 2300 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "grad_norm": 0.8608907461166382, | |
| "learning_rate": 5.500972561906832e-05, | |
| "loss": 3.094, | |
| "step": 2305 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "grad_norm": 0.7653377056121826, | |
| "learning_rate": 5.484382120778048e-05, | |
| "loss": 3.0655, | |
| "step": 2310 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "grad_norm": 0.753713846206665, | |
| "learning_rate": 5.467786294456575e-05, | |
| "loss": 3.0486, | |
| "step": 2315 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "grad_norm": 0.7956765294075012, | |
| "learning_rate": 5.451185267449061e-05, | |
| "loss": 3.0335, | |
| "step": 2320 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "grad_norm": 0.8601648807525635, | |
| "learning_rate": 5.43457922431998e-05, | |
| "loss": 3.0812, | |
| "step": 2325 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "grad_norm": 0.7874650955200195, | |
| "learning_rate": 5.417968349689566e-05, | |
| "loss": 3.061, | |
| "step": 2330 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "grad_norm": 0.7889178991317749, | |
| "learning_rate": 5.401352828231772e-05, | |
| "loss": 3.0777, | |
| "step": 2335 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "grad_norm": 0.8483275771141052, | |
| "learning_rate": 5.384732844672211e-05, | |
| "loss": 3.0562, | |
| "step": 2340 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "grad_norm": 0.7692832946777344, | |
| "learning_rate": 5.368108583786107e-05, | |
| "loss": 2.9819, | |
| "step": 2345 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "grad_norm": 1.0013173818588257, | |
| "learning_rate": 5.3514802303962344e-05, | |
| "loss": 3.073, | |
| "step": 2350 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "grad_norm": 0.8124668598175049, | |
| "learning_rate": 5.334847969370868e-05, | |
| "loss": 3.185, | |
| "step": 2355 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "grad_norm": 0.7622416615486145, | |
| "learning_rate": 5.3182119856217284e-05, | |
| "loss": 3.0727, | |
| "step": 2360 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "grad_norm": 0.7645342946052551, | |
| "learning_rate": 5.3015724641019214e-05, | |
| "loss": 3.1048, | |
| "step": 2365 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "grad_norm": 0.9504043459892273, | |
| "learning_rate": 5.284929589803884e-05, | |
| "loss": 3.087, | |
| "step": 2370 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "grad_norm": 0.7891102433204651, | |
| "learning_rate": 5.2682835477573336e-05, | |
| "loss": 3.0473, | |
| "step": 2375 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "grad_norm": 0.8160179853439331, | |
| "learning_rate": 5.2516345230271965e-05, | |
| "loss": 3.0516, | |
| "step": 2380 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "grad_norm": 0.8158875703811646, | |
| "learning_rate": 5.234982700711569e-05, | |
| "loss": 3.0536, | |
| "step": 2385 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "grad_norm": 0.7385179400444031, | |
| "learning_rate": 5.218328265939643e-05, | |
| "loss": 3.0696, | |
| "step": 2390 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "grad_norm": 0.7145212292671204, | |
| "learning_rate": 5.201671403869657e-05, | |
| "loss": 3.1129, | |
| "step": 2395 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "grad_norm": 0.8175336122512817, | |
| "learning_rate": 5.1850122996868366e-05, | |
| "loss": 3.0402, | |
| "step": 2400 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "grad_norm": 0.7508947849273682, | |
| "learning_rate": 5.168351138601334e-05, | |
| "loss": 3.0134, | |
| "step": 2405 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "grad_norm": 0.7701888084411621, | |
| "learning_rate": 5.1516881058461675e-05, | |
| "loss": 3.0564, | |
| "step": 2410 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "grad_norm": 0.7426919341087341, | |
| "learning_rate": 5.135023386675166e-05, | |
| "loss": 2.959, | |
| "step": 2415 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "grad_norm": 0.8313874006271362, | |
| "learning_rate": 5.118357166360906e-05, | |
| "loss": 3.0031, | |
| "step": 2420 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "grad_norm": 0.7441394329071045, | |
| "learning_rate": 5.101689630192655e-05, | |
| "loss": 3.0683, | |
| "step": 2425 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "grad_norm": 0.831236720085144, | |
| "learning_rate": 5.085020963474307e-05, | |
| "loss": 3.0819, | |
| "step": 2430 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "grad_norm": 1.2524930238723755, | |
| "learning_rate": 5.068351351522329e-05, | |
| "loss": 3.0524, | |
| "step": 2435 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "grad_norm": 0.771321177482605, | |
| "learning_rate": 5.0516809796636935e-05, | |
| "loss": 3.0625, | |
| "step": 2440 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "grad_norm": 0.8278329372406006, | |
| "learning_rate": 5.035010033233821e-05, | |
| "loss": 3.0777, | |
| "step": 2445 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "grad_norm": 0.7575139999389648, | |
| "learning_rate": 5.018338697574523e-05, | |
| "loss": 3.0841, | |
| "step": 2450 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "grad_norm": 0.7472745180130005, | |
| "learning_rate": 5.0016671580319354e-05, | |
| "loss": 3.0823, | |
| "step": 2455 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "grad_norm": 0.769930899143219, | |
| "learning_rate": 4.984995599954461e-05, | |
| "loss": 3.1232, | |
| "step": 2460 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "grad_norm": 0.8989920020103455, | |
| "learning_rate": 4.968324208690712e-05, | |
| "loss": 3.0465, | |
| "step": 2465 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "grad_norm": 0.8685335516929626, | |
| "learning_rate": 4.951653169587441e-05, | |
| "loss": 3.1068, | |
| "step": 2470 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "grad_norm": 0.7176471948623657, | |
| "learning_rate": 4.93498266798749e-05, | |
| "loss": 2.9779, | |
| "step": 2475 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "grad_norm": 0.7075949311256409, | |
| "learning_rate": 4.918312889227722e-05, | |
| "loss": 2.9339, | |
| "step": 2480 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "grad_norm": 0.8076692819595337, | |
| "learning_rate": 4.901644018636966e-05, | |
| "loss": 3.0333, | |
| "step": 2485 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "grad_norm": 0.7829530239105225, | |
| "learning_rate": 4.8849762415339526e-05, | |
| "loss": 3.0575, | |
| "step": 2490 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "grad_norm": 0.728006899356842, | |
| "learning_rate": 4.868309743225256e-05, | |
| "loss": 3.0128, | |
| "step": 2495 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "grad_norm": 0.7966864705085754, | |
| "learning_rate": 4.851644709003233e-05, | |
| "loss": 3.0176, | |
| "step": 2500 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "grad_norm": 0.8182801604270935, | |
| "learning_rate": 4.834981324143964e-05, | |
| "loss": 2.9736, | |
| "step": 2505 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "grad_norm": 0.8191970586776733, | |
| "learning_rate": 4.818319773905191e-05, | |
| "loss": 2.996, | |
| "step": 2510 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "grad_norm": 0.8786250352859497, | |
| "learning_rate": 4.801660243524261e-05, | |
| "loss": 3.0239, | |
| "step": 2515 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "grad_norm": 0.7968888282775879, | |
| "learning_rate": 4.7850029182160626e-05, | |
| "loss": 3.0386, | |
| "step": 2520 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "grad_norm": 0.8909465670585632, | |
| "learning_rate": 4.768347983170973e-05, | |
| "loss": 3.0715, | |
| "step": 2525 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "grad_norm": 0.809687077999115, | |
| "learning_rate": 4.7516956235527884e-05, | |
| "loss": 2.9583, | |
| "step": 2530 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "grad_norm": 0.7980312705039978, | |
| "learning_rate": 4.735046024496682e-05, | |
| "loss": 3.0156, | |
| "step": 2535 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "grad_norm": 0.8982890248298645, | |
| "learning_rate": 4.7183993711071286e-05, | |
| "loss": 3.0072, | |
| "step": 2540 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "grad_norm": 0.9279652237892151, | |
| "learning_rate": 4.7017558484558554e-05, | |
| "loss": 3.0205, | |
| "step": 2545 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "grad_norm": 0.7835025191307068, | |
| "learning_rate": 4.6851156415797844e-05, | |
| "loss": 3.0498, | |
| "step": 2550 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "grad_norm": 0.7307499051094055, | |
| "learning_rate": 4.6684789354789746e-05, | |
| "loss": 3.0008, | |
| "step": 2555 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "grad_norm": 0.7745723724365234, | |
| "learning_rate": 4.651845915114563e-05, | |
| "loss": 3.0332, | |
| "step": 2560 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "grad_norm": 0.7176873683929443, | |
| "learning_rate": 4.6352167654067095e-05, | |
| "loss": 3.0304, | |
| "step": 2565 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "grad_norm": 0.8944629430770874, | |
| "learning_rate": 4.618591671232544e-05, | |
| "loss": 3.07, | |
| "step": 2570 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "grad_norm": 0.7425732612609863, | |
| "learning_rate": 4.601970817424106e-05, | |
| "loss": 3.0369, | |
| "step": 2575 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "grad_norm": 0.7213885188102722, | |
| "learning_rate": 4.585354388766292e-05, | |
| "loss": 3.0521, | |
| "step": 2580 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "grad_norm": 0.8457189798355103, | |
| "learning_rate": 4.568742569994802e-05, | |
| "loss": 3.0551, | |
| "step": 2585 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "grad_norm": 0.7497371435165405, | |
| "learning_rate": 4.552135545794086e-05, | |
| "loss": 3.0608, | |
| "step": 2590 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "grad_norm": 0.7387011647224426, | |
| "learning_rate": 4.535533500795288e-05, | |
| "loss": 3.0107, | |
| "step": 2595 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "grad_norm": 0.7279102206230164, | |
| "learning_rate": 4.5189366195741953e-05, | |
| "loss": 2.9787, | |
| "step": 2600 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "grad_norm": 0.7574561834335327, | |
| "learning_rate": 4.502345086649186e-05, | |
| "loss": 3.0479, | |
| "step": 2605 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "grad_norm": 0.7569758892059326, | |
| "learning_rate": 4.485759086479179e-05, | |
| "loss": 3.0732, | |
| "step": 2610 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "grad_norm": 0.7664837837219238, | |
| "learning_rate": 4.469178803461579e-05, | |
| "loss": 3.0133, | |
| "step": 2615 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "grad_norm": 0.779332160949707, | |
| "learning_rate": 4.4526044219302326e-05, | |
| "loss": 3.0355, | |
| "step": 2620 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "grad_norm": 0.7874385714530945, | |
| "learning_rate": 4.4360361261533745e-05, | |
| "loss": 3.0803, | |
| "step": 2625 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "grad_norm": 0.8164911270141602, | |
| "learning_rate": 4.419474100331579e-05, | |
| "loss": 3.0362, | |
| "step": 2630 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "grad_norm": 0.7843874096870422, | |
| "learning_rate": 4.402918528595715e-05, | |
| "loss": 3.0466, | |
| "step": 2635 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "grad_norm": 0.7718837261199951, | |
| "learning_rate": 4.386369595004896e-05, | |
| "loss": 2.9616, | |
| "step": 2640 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "grad_norm": 0.7669267654418945, | |
| "learning_rate": 4.3698274835444354e-05, | |
| "loss": 3.0097, | |
| "step": 2645 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "grad_norm": 0.7109093070030212, | |
| "learning_rate": 4.3532923781238e-05, | |
| "loss": 3.0104, | |
| "step": 2650 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "grad_norm": 0.756790816783905, | |
| "learning_rate": 4.336764462574566e-05, | |
| "loss": 3.0167, | |
| "step": 2655 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "grad_norm": 0.8009018898010254, | |
| "learning_rate": 4.320243920648376e-05, | |
| "loss": 2.9841, | |
| "step": 2660 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "grad_norm": 0.7864429950714111, | |
| "learning_rate": 4.303730936014894e-05, | |
| "loss": 2.9678, | |
| "step": 2665 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "grad_norm": 0.7420701384544373, | |
| "learning_rate": 4.287225692259765e-05, | |
| "loss": 3.131, | |
| "step": 2670 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "grad_norm": 0.7833184003829956, | |
| "learning_rate": 4.270728372882575e-05, | |
| "loss": 2.9289, | |
| "step": 2675 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "grad_norm": 1.0203622579574585, | |
| "learning_rate": 4.254239161294804e-05, | |
| "loss": 3.0243, | |
| "step": 2680 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "grad_norm": 0.7398558855056763, | |
| "learning_rate": 4.237758240817802e-05, | |
| "loss": 2.9651, | |
| "step": 2685 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "grad_norm": 0.7736649513244629, | |
| "learning_rate": 4.2212857946807336e-05, | |
| "loss": 3.0103, | |
| "step": 2690 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "grad_norm": 0.7363585829734802, | |
| "learning_rate": 4.2048220060185516e-05, | |
| "loss": 3.0684, | |
| "step": 2695 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "grad_norm": 0.8192570209503174, | |
| "learning_rate": 4.188367057869957e-05, | |
| "loss": 2.9984, | |
| "step": 2700 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "grad_norm": 0.7691248655319214, | |
| "learning_rate": 4.171921133175365e-05, | |
| "loss": 2.9445, | |
| "step": 2705 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "grad_norm": 0.7939426898956299, | |
| "learning_rate": 4.155484414774872e-05, | |
| "loss": 2.9812, | |
| "step": 2710 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "grad_norm": 0.7908325791358948, | |
| "learning_rate": 4.139057085406221e-05, | |
| "loss": 2.9316, | |
| "step": 2715 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "grad_norm": 0.8224896788597107, | |
| "learning_rate": 4.1226393277027726e-05, | |
| "loss": 3.0426, | |
| "step": 2720 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "grad_norm": 0.7719295024871826, | |
| "learning_rate": 4.106231324191471e-05, | |
| "loss": 3.0278, | |
| "step": 2725 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "grad_norm": 0.7827658653259277, | |
| "learning_rate": 4.089833257290817e-05, | |
| "loss": 2.952, | |
| "step": 2730 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "grad_norm": 0.7247050404548645, | |
| "learning_rate": 4.073445309308842e-05, | |
| "loss": 3.0452, | |
| "step": 2735 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "grad_norm": 0.7727264761924744, | |
| "learning_rate": 4.0570676624410756e-05, | |
| "loss": 3.0622, | |
| "step": 2740 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "grad_norm": 0.7711555361747742, | |
| "learning_rate": 4.040700498768525e-05, | |
| "loss": 3.0039, | |
| "step": 2745 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "grad_norm": 0.7570649981498718, | |
| "learning_rate": 4.024344000255648e-05, | |
| "loss": 3.0374, | |
| "step": 2750 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "grad_norm": 0.7238669395446777, | |
| "learning_rate": 4.0079983487483313e-05, | |
| "loss": 3.1027, | |
| "step": 2755 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "grad_norm": 0.7199037075042725, | |
| "learning_rate": 3.9916637259718683e-05, | |
| "loss": 2.9343, | |
| "step": 2760 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "grad_norm": 0.7784489393234253, | |
| "learning_rate": 3.9753403135289396e-05, | |
| "loss": 2.9833, | |
| "step": 2765 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "grad_norm": 0.7967301607131958, | |
| "learning_rate": 3.9590282928975914e-05, | |
| "loss": 2.9837, | |
| "step": 2770 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "grad_norm": 0.818950891494751, | |
| "learning_rate": 3.942727845429221e-05, | |
| "loss": 2.9641, | |
| "step": 2775 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "grad_norm": 0.7812902927398682, | |
| "learning_rate": 3.926439152346558e-05, | |
| "loss": 3.012, | |
| "step": 2780 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "grad_norm": 0.7703650593757629, | |
| "learning_rate": 3.910162394741653e-05, | |
| "loss": 3.0109, | |
| "step": 2785 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "grad_norm": 0.7691267728805542, | |
| "learning_rate": 3.893897753573861e-05, | |
| "loss": 3.0202, | |
| "step": 2790 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "grad_norm": 0.7319338917732239, | |
| "learning_rate": 3.877645409667829e-05, | |
| "loss": 2.9708, | |
| "step": 2795 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "grad_norm": 0.7321139574050903, | |
| "learning_rate": 3.861405543711491e-05, | |
| "loss": 3.051, | |
| "step": 2800 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "grad_norm": 0.7825955152511597, | |
| "learning_rate": 3.8451783362540507e-05, | |
| "loss": 2.952, | |
| "step": 2805 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "grad_norm": 0.7175664901733398, | |
| "learning_rate": 3.828963967703983e-05, | |
| "loss": 2.9443, | |
| "step": 2810 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "grad_norm": 0.7269397974014282, | |
| "learning_rate": 3.8127626183270223e-05, | |
| "loss": 2.963, | |
| "step": 2815 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "grad_norm": 0.7328863143920898, | |
| "learning_rate": 3.796574468244161e-05, | |
| "loss": 2.9987, | |
| "step": 2820 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "grad_norm": 0.7537096738815308, | |
| "learning_rate": 3.7803996974296444e-05, | |
| "loss": 2.987, | |
| "step": 2825 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "grad_norm": 0.7476091980934143, | |
| "learning_rate": 3.7642384857089776e-05, | |
| "loss": 2.9432, | |
| "step": 2830 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "grad_norm": 0.7222900390625, | |
| "learning_rate": 3.748091012756915e-05, | |
| "loss": 2.9411, | |
| "step": 2835 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "grad_norm": 0.7150359153747559, | |
| "learning_rate": 3.731957458095467e-05, | |
| "loss": 2.9797, | |
| "step": 2840 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "grad_norm": 0.7514713406562805, | |
| "learning_rate": 3.71583800109191e-05, | |
| "loss": 2.9989, | |
| "step": 2845 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "grad_norm": 0.7849773168563843, | |
| "learning_rate": 3.699732820956784e-05, | |
| "loss": 2.9185, | |
| "step": 2850 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "grad_norm": 0.8713308572769165, | |
| "learning_rate": 3.6836420967419057e-05, | |
| "loss": 2.9673, | |
| "step": 2855 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "grad_norm": 0.7036404609680176, | |
| "learning_rate": 3.6675660073383745e-05, | |
| "loss": 2.9765, | |
| "step": 2860 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "grad_norm": 0.8053275346755981, | |
| "learning_rate": 3.6515047314745856e-05, | |
| "loss": 2.9215, | |
| "step": 2865 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "grad_norm": 0.8694131374359131, | |
| "learning_rate": 3.6354584477142437e-05, | |
| "loss": 3.0497, | |
| "step": 2870 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "grad_norm": 0.7967677116394043, | |
| "learning_rate": 3.6194273344543736e-05, | |
| "loss": 2.9778, | |
| "step": 2875 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "grad_norm": 0.7926526069641113, | |
| "learning_rate": 3.6034115699233425e-05, | |
| "loss": 2.9845, | |
| "step": 2880 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "grad_norm": 0.867080807685852, | |
| "learning_rate": 3.5874113321788736e-05, | |
| "loss": 2.9635, | |
| "step": 2885 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "grad_norm": 0.821435809135437, | |
| "learning_rate": 3.571426799106071e-05, | |
| "loss": 3.0256, | |
| "step": 2890 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "grad_norm": 0.7598234415054321, | |
| "learning_rate": 3.555458148415437e-05, | |
| "loss": 2.8754, | |
| "step": 2895 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "grad_norm": 0.7543870806694031, | |
| "learning_rate": 3.539505557640901e-05, | |
| "loss": 2.8954, | |
| "step": 2900 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "grad_norm": 0.7671110033988953, | |
| "learning_rate": 3.523569204137843e-05, | |
| "loss": 2.9985, | |
| "step": 2905 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "grad_norm": 0.7498314380645752, | |
| "learning_rate": 3.5076492650811246e-05, | |
| "loss": 2.9652, | |
| "step": 2910 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "grad_norm": 0.8090139627456665, | |
| "learning_rate": 3.491745917463113e-05, | |
| "loss": 3.0449, | |
| "step": 2915 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "grad_norm": 0.745242714881897, | |
| "learning_rate": 3.475859338091721e-05, | |
| "loss": 2.9741, | |
| "step": 2920 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "grad_norm": 0.7507796883583069, | |
| "learning_rate": 3.4599897035884374e-05, | |
| "loss": 2.9982, | |
| "step": 2925 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "grad_norm": 0.7105052471160889, | |
| "learning_rate": 3.444137190386363e-05, | |
| "loss": 2.8603, | |
| "step": 2930 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "grad_norm": 0.8144690990447998, | |
| "learning_rate": 3.4283019747282514e-05, | |
| "loss": 2.8923, | |
| "step": 2935 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "grad_norm": 0.7487353682518005, | |
| "learning_rate": 3.412484232664545e-05, | |
| "loss": 2.9113, | |
| "step": 2940 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "grad_norm": 0.7318117022514343, | |
| "learning_rate": 3.396684140051424e-05, | |
| "loss": 2.9088, | |
| "step": 2945 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "grad_norm": 0.7745131850242615, | |
| "learning_rate": 3.3809018725488466e-05, | |
| "loss": 2.9181, | |
| "step": 2950 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "grad_norm": 0.7339638471603394, | |
| "learning_rate": 3.365137605618598e-05, | |
| "loss": 2.8896, | |
| "step": 2955 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "grad_norm": 0.70101398229599, | |
| "learning_rate": 3.3493915145223395e-05, | |
| "loss": 2.8815, | |
| "step": 2960 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "grad_norm": 0.7292600870132446, | |
| "learning_rate": 3.3336637743196584e-05, | |
| "loss": 2.9279, | |
| "step": 2965 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "grad_norm": 0.7292124032974243, | |
| "learning_rate": 3.317954559866126e-05, | |
| "loss": 2.9919, | |
| "step": 2970 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "grad_norm": 0.7561968564987183, | |
| "learning_rate": 3.302264045811344e-05, | |
| "loss": 2.9263, | |
| "step": 2975 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "grad_norm": 0.7866065502166748, | |
| "learning_rate": 3.286592406597021e-05, | |
| "loss": 2.9578, | |
| "step": 2980 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "grad_norm": 0.7142008543014526, | |
| "learning_rate": 3.270939816455012e-05, | |
| "loss": 2.9274, | |
| "step": 2985 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "grad_norm": 0.7289608716964722, | |
| "learning_rate": 3.255306449405395e-05, | |
| "loss": 2.9959, | |
| "step": 2990 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "grad_norm": 0.7562645077705383, | |
| "learning_rate": 3.2396924792545304e-05, | |
| "loss": 2.9522, | |
| "step": 2995 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "grad_norm": 0.7758344411849976, | |
| "learning_rate": 3.224098079593132e-05, | |
| "loss": 2.9931, | |
| "step": 3000 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "grad_norm": 0.7064464688301086, | |
| "learning_rate": 3.2085234237943354e-05, | |
| "loss": 2.9511, | |
| "step": 3005 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "grad_norm": 0.7782897353172302, | |
| "learning_rate": 3.19296868501177e-05, | |
| "loss": 2.9672, | |
| "step": 3010 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "grad_norm": 0.7506510019302368, | |
| "learning_rate": 3.177434036177636e-05, | |
| "loss": 2.9373, | |
| "step": 3015 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "grad_norm": 0.782500147819519, | |
| "learning_rate": 3.1619196500007804e-05, | |
| "loss": 2.9374, | |
| "step": 3020 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "grad_norm": 0.7468158602714539, | |
| "learning_rate": 3.146425698964776e-05, | |
| "loss": 3.0418, | |
| "step": 3025 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "grad_norm": 0.7175282835960388, | |
| "learning_rate": 3.1309523553260046e-05, | |
| "loss": 2.9846, | |
| "step": 3030 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "grad_norm": 0.710383415222168, | |
| "learning_rate": 3.115499791111743e-05, | |
| "loss": 2.9313, | |
| "step": 3035 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "grad_norm": 0.716145932674408, | |
| "learning_rate": 3.10006817811825e-05, | |
| "loss": 2.9134, | |
| "step": 3040 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "grad_norm": 0.696806788444519, | |
| "learning_rate": 3.084657687908855e-05, | |
| "loss": 2.9742, | |
| "step": 3045 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "grad_norm": 1.1181516647338867, | |
| "learning_rate": 3.069268491812052e-05, | |
| "loss": 2.9468, | |
| "step": 3050 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "grad_norm": 0.7641059160232544, | |
| "learning_rate": 3.0539007609195934e-05, | |
| "loss": 2.9699, | |
| "step": 3055 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "grad_norm": 0.7946372032165527, | |
| "learning_rate": 3.0385546660845908e-05, | |
| "loss": 2.943, | |
| "step": 3060 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "grad_norm": 1.2118492126464844, | |
| "learning_rate": 3.0232303779196132e-05, | |
| "loss": 2.8909, | |
| "step": 3065 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "grad_norm": 0.7043571472167969, | |
| "learning_rate": 3.0079280667947885e-05, | |
| "loss": 3.0081, | |
| "step": 3070 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "grad_norm": 0.7849633097648621, | |
| "learning_rate": 2.9926479028359132e-05, | |
| "loss": 2.9086, | |
| "step": 3075 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "grad_norm": 0.71959388256073, | |
| "learning_rate": 2.97739005592256e-05, | |
| "loss": 2.8912, | |
| "step": 3080 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "grad_norm": 0.7964043617248535, | |
| "learning_rate": 2.962154695686187e-05, | |
| "loss": 2.9814, | |
| "step": 3085 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "grad_norm": 0.7292230725288391, | |
| "learning_rate": 2.9469419915082536e-05, | |
| "loss": 2.9055, | |
| "step": 3090 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "grad_norm": 0.7503044605255127, | |
| "learning_rate": 2.9317521125183368e-05, | |
| "loss": 2.9198, | |
| "step": 3095 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "grad_norm": 0.738675594329834, | |
| "learning_rate": 2.9165852275922524e-05, | |
| "loss": 2.9487, | |
| "step": 3100 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "grad_norm": 0.7485631704330444, | |
| "learning_rate": 2.901441505350174e-05, | |
| "loss": 2.9318, | |
| "step": 3105 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "grad_norm": 0.7371665835380554, | |
| "learning_rate": 2.886321114154762e-05, | |
| "loss": 2.9264, | |
| "step": 3110 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "grad_norm": 0.7446396946907043, | |
| "learning_rate": 2.87122422210929e-05, | |
| "loss": 2.9499, | |
| "step": 3115 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "grad_norm": 0.7185680270195007, | |
| "learning_rate": 2.8561509970557736e-05, | |
| "loss": 2.9115, | |
| "step": 3120 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "grad_norm": 0.7560344338417053, | |
| "learning_rate": 2.8411016065731146e-05, | |
| "loss": 2.9038, | |
| "step": 3125 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "grad_norm": 0.7304858565330505, | |
| "learning_rate": 2.826076217975222e-05, | |
| "loss": 2.9819, | |
| "step": 3130 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "grad_norm": 0.7766464948654175, | |
| "learning_rate": 2.8110749983091632e-05, | |
| "loss": 2.9908, | |
| "step": 3135 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "grad_norm": 0.7244294285774231, | |
| "learning_rate": 2.7960981143533053e-05, | |
| "loss": 2.8959, | |
| "step": 3140 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "grad_norm": 0.7433860898017883, | |
| "learning_rate": 2.781145732615457e-05, | |
| "loss": 3.0022, | |
| "step": 3145 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "grad_norm": 0.7486001253128052, | |
| "learning_rate": 2.7662180193310218e-05, | |
| "loss": 2.9627, | |
| "step": 3150 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "grad_norm": 0.7516294121742249, | |
| "learning_rate": 2.751315140461145e-05, | |
| "loss": 2.9492, | |
| "step": 3155 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "grad_norm": 0.7541138529777527, | |
| "learning_rate": 2.7364372616908744e-05, | |
| "loss": 2.9272, | |
| "step": 3160 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "grad_norm": 0.7532901167869568, | |
| "learning_rate": 2.7215845484273152e-05, | |
| "loss": 2.9088, | |
| "step": 3165 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "grad_norm": 0.7467599511146545, | |
| "learning_rate": 2.7067571657977893e-05, | |
| "loss": 2.9059, | |
| "step": 3170 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "grad_norm": 0.7376793026924133, | |
| "learning_rate": 2.691955278648003e-05, | |
| "loss": 2.9338, | |
| "step": 3175 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "grad_norm": 0.744006335735321, | |
| "learning_rate": 2.6771790515402112e-05, | |
| "loss": 2.9559, | |
| "step": 3180 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "grad_norm": 0.7515101432800293, | |
| "learning_rate": 2.6624286487513916e-05, | |
| "loss": 2.9671, | |
| "step": 3185 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "grad_norm": 0.7245140075683594, | |
| "learning_rate": 2.6477042342714137e-05, | |
| "loss": 2.8971, | |
| "step": 3190 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "grad_norm": 0.75704026222229, | |
| "learning_rate": 2.633005971801219e-05, | |
| "loss": 2.9585, | |
| "step": 3195 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "grad_norm": 1.136002540588379, | |
| "learning_rate": 2.6183340247510013e-05, | |
| "loss": 2.9161, | |
| "step": 3200 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "grad_norm": 0.7156395316123962, | |
| "learning_rate": 2.6036885562383856e-05, | |
| "loss": 2.9656, | |
| "step": 3205 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "grad_norm": 0.7116406559944153, | |
| "learning_rate": 2.5890697290866206e-05, | |
| "loss": 2.8739, | |
| "step": 3210 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "grad_norm": 0.7035011053085327, | |
| "learning_rate": 2.5744777058227642e-05, | |
| "loss": 2.9376, | |
| "step": 3215 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "grad_norm": 0.7160550355911255, | |
| "learning_rate": 2.5599126486758777e-05, | |
| "loss": 2.9457, | |
| "step": 3220 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "grad_norm": 0.7138339281082153, | |
| "learning_rate": 2.5453747195752243e-05, | |
| "loss": 2.9514, | |
| "step": 3225 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "grad_norm": 0.7035452723503113, | |
| "learning_rate": 2.530864080148464e-05, | |
| "loss": 3.0333, | |
| "step": 3230 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "grad_norm": 0.7056491374969482, | |
| "learning_rate": 2.5163808917198615e-05, | |
| "loss": 2.9416, | |
| "step": 3235 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "grad_norm": 0.7584891319274902, | |
| "learning_rate": 2.501925315308492e-05, | |
| "loss": 2.9302, | |
| "step": 3240 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "grad_norm": 0.7068156003952026, | |
| "learning_rate": 2.4874975116264477e-05, | |
| "loss": 2.9203, | |
| "step": 3245 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "grad_norm": 0.6893326640129089, | |
| "learning_rate": 2.4730976410770534e-05, | |
| "loss": 2.9909, | |
| "step": 3250 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "grad_norm": 0.7033855319023132, | |
| "learning_rate": 2.458725863753084e-05, | |
| "loss": 2.878, | |
| "step": 3255 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "grad_norm": 0.7077926993370056, | |
| "learning_rate": 2.4443823394349834e-05, | |
| "loss": 2.8663, | |
| "step": 3260 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "grad_norm": 0.7133041024208069, | |
| "learning_rate": 2.430067227589088e-05, | |
| "loss": 2.9166, | |
| "step": 3265 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "grad_norm": 0.726646363735199, | |
| "learning_rate": 2.4157806873658517e-05, | |
| "loss": 2.8886, | |
| "step": 3270 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "grad_norm": 0.7560091018676758, | |
| "learning_rate": 2.401522877598087e-05, | |
| "loss": 2.9987, | |
| "step": 3275 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "grad_norm": 0.7593598365783691, | |
| "learning_rate": 2.3872939567991827e-05, | |
| "loss": 2.9374, | |
| "step": 3280 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "grad_norm": 0.7126137614250183, | |
| "learning_rate": 2.373094083161353e-05, | |
| "loss": 2.9287, | |
| "step": 3285 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "grad_norm": 0.7201817631721497, | |
| "learning_rate": 2.358923414553877e-05, | |
| "loss": 2.9828, | |
| "step": 3290 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "grad_norm": 0.7259865403175354, | |
| "learning_rate": 2.3447821085213405e-05, | |
| "loss": 2.9299, | |
| "step": 3295 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "grad_norm": 0.7654474973678589, | |
| "learning_rate": 2.3306703222818878e-05, | |
| "loss": 2.8601, | |
| "step": 3300 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "grad_norm": 0.6903141736984253, | |
| "learning_rate": 2.3165882127254705e-05, | |
| "loss": 2.8628, | |
| "step": 3305 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "grad_norm": 0.7235303521156311, | |
| "learning_rate": 2.302535936412108e-05, | |
| "loss": 2.9297, | |
| "step": 3310 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "grad_norm": 0.7143175601959229, | |
| "learning_rate": 2.2885136495701415e-05, | |
| "loss": 2.8943, | |
| "step": 3315 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "grad_norm": 0.7538760304450989, | |
| "learning_rate": 2.274521508094501e-05, | |
| "loss": 2.9474, | |
| "step": 3320 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "grad_norm": 0.7201484441757202, | |
| "learning_rate": 2.2605596675449698e-05, | |
| "loss": 2.9409, | |
| "step": 3325 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "grad_norm": 0.7387991547584534, | |
| "learning_rate": 2.246628283144457e-05, | |
| "loss": 2.9173, | |
| "step": 3330 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "grad_norm": 0.7402165532112122, | |
| "learning_rate": 2.232727509777269e-05, | |
| "loss": 2.9374, | |
| "step": 3335 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "grad_norm": 0.7230446934700012, | |
| "learning_rate": 2.2188575019873932e-05, | |
| "loss": 3.0087, | |
| "step": 3340 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "grad_norm": 0.7437344789505005, | |
| "learning_rate": 2.2050184139767704e-05, | |
| "loss": 2.9226, | |
| "step": 3345 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "grad_norm": 0.706643283367157, | |
| "learning_rate": 2.191210399603591e-05, | |
| "loss": 2.8564, | |
| "step": 3350 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "grad_norm": 0.7320424914360046, | |
| "learning_rate": 2.1774336123805772e-05, | |
| "loss": 2.9304, | |
| "step": 3355 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "grad_norm": 0.7519949078559875, | |
| "learning_rate": 2.1636882054732776e-05, | |
| "loss": 2.9363, | |
| "step": 3360 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "grad_norm": 0.750419020652771, | |
| "learning_rate": 2.1499743316983684e-05, | |
| "loss": 2.9254, | |
| "step": 3365 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "grad_norm": 0.7141222953796387, | |
| "learning_rate": 2.1362921435219473e-05, | |
| "loss": 2.9119, | |
| "step": 3370 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "grad_norm": 0.719929039478302, | |
| "learning_rate": 2.1226417930578464e-05, | |
| "loss": 3.0009, | |
| "step": 3375 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "grad_norm": 0.7420862913131714, | |
| "learning_rate": 2.109023432065935e-05, | |
| "loss": 2.9417, | |
| "step": 3380 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "grad_norm": 0.7002443671226501, | |
| "learning_rate": 2.095437211950434e-05, | |
| "loss": 2.9186, | |
| "step": 3385 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "grad_norm": 0.7307898998260498, | |
| "learning_rate": 2.0818832837582352e-05, | |
| "loss": 2.9545, | |
| "step": 3390 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "grad_norm": 0.7779083847999573, | |
| "learning_rate": 2.068361798177218e-05, | |
| "loss": 2.9547, | |
| "step": 3395 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "grad_norm": 0.7477301359176636, | |
| "learning_rate": 2.0548729055345778e-05, | |
| "loss": 2.8895, | |
| "step": 3400 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "grad_norm": 0.7072484493255615, | |
| "learning_rate": 2.0414167557951514e-05, | |
| "loss": 2.8911, | |
| "step": 3405 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "grad_norm": 0.766034722328186, | |
| "learning_rate": 2.0279934985597527e-05, | |
| "loss": 2.9381, | |
| "step": 3410 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "grad_norm": 0.7417451739311218, | |
| "learning_rate": 2.0146032830635054e-05, | |
| "loss": 3.0118, | |
| "step": 3415 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "grad_norm": 1.30099356174469, | |
| "learning_rate": 2.001246258174192e-05, | |
| "loss": 2.9095, | |
| "step": 3420 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "grad_norm": 0.6984534859657288, | |
| "learning_rate": 1.9879225723905886e-05, | |
| "loss": 2.8572, | |
| "step": 3425 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "grad_norm": 0.7067037224769592, | |
| "learning_rate": 1.9746323738408203e-05, | |
| "loss": 2.8692, | |
| "step": 3430 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "grad_norm": 0.7694417834281921, | |
| "learning_rate": 1.9613758102807117e-05, | |
| "loss": 2.8991, | |
| "step": 3435 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "grad_norm": 0.7420687079429626, | |
| "learning_rate": 1.9481530290921474e-05, | |
| "loss": 2.8858, | |
| "step": 3440 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "grad_norm": 0.7748597860336304, | |
| "learning_rate": 1.934964177281428e-05, | |
| "loss": 2.9006, | |
| "step": 3445 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "grad_norm": 0.7901410460472107, | |
| "learning_rate": 1.9218094014776434e-05, | |
| "loss": 2.9766, | |
| "step": 3450 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "grad_norm": 0.7284368872642517, | |
| "learning_rate": 1.9086888479310333e-05, | |
| "loss": 2.9579, | |
| "step": 3455 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "grad_norm": 0.7197433710098267, | |
| "learning_rate": 1.895602662511371e-05, | |
| "loss": 2.8325, | |
| "step": 3460 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "grad_norm": 0.762126088142395, | |
| "learning_rate": 1.8825509907063327e-05, | |
| "loss": 2.945, | |
| "step": 3465 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "grad_norm": 0.7177615761756897, | |
| "learning_rate": 1.8695339776198872e-05, | |
| "loss": 2.8931, | |
| "step": 3470 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "grad_norm": 0.6981935501098633, | |
| "learning_rate": 1.8565517679706783e-05, | |
| "loss": 2.9542, | |
| "step": 3475 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "grad_norm": 0.7407300472259521, | |
| "learning_rate": 1.8436045060904174e-05, | |
| "loss": 2.8342, | |
| "step": 3480 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "grad_norm": 0.705169141292572, | |
| "learning_rate": 1.830692335922279e-05, | |
| "loss": 2.9558, | |
| "step": 3485 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "grad_norm": 0.7445974946022034, | |
| "learning_rate": 1.8178154010192994e-05, | |
| "loss": 2.9693, | |
| "step": 3490 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "grad_norm": 0.766822874546051, | |
| "learning_rate": 1.8049738445427822e-05, | |
| "loss": 2.905, | |
| "step": 3495 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "grad_norm": 0.7473999261856079, | |
| "learning_rate": 1.7921678092607052e-05, | |
| "loss": 2.9527, | |
| "step": 3500 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "grad_norm": 0.7297512292861938, | |
| "learning_rate": 1.7793974375461352e-05, | |
| "loss": 2.8616, | |
| "step": 3505 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "grad_norm": 0.7029373049736023, | |
| "learning_rate": 1.7666628713756417e-05, | |
| "loss": 2.8881, | |
| "step": 3510 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "grad_norm": 0.7450504899024963, | |
| "learning_rate": 1.7539642523277228e-05, | |
| "loss": 2.8489, | |
| "step": 3515 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "grad_norm": 0.7007685899734497, | |
| "learning_rate": 1.7413017215812273e-05, | |
| "loss": 2.8733, | |
| "step": 3520 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "grad_norm": 0.7155572772026062, | |
| "learning_rate": 1.728675419913788e-05, | |
| "loss": 2.8785, | |
| "step": 3525 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "grad_norm": 0.7719442248344421, | |
| "learning_rate": 1.716085487700253e-05, | |
| "loss": 2.8869, | |
| "step": 3530 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "grad_norm": 0.7274740934371948, | |
| "learning_rate": 1.703532064911131e-05, | |
| "loss": 3.0106, | |
| "step": 3535 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "grad_norm": 0.7263054251670837, | |
| "learning_rate": 1.6910152911110283e-05, | |
| "loss": 2.9295, | |
| "step": 3540 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "grad_norm": 0.7400492429733276, | |
| "learning_rate": 1.6785353054571024e-05, | |
| "loss": 2.8889, | |
| "step": 3545 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "grad_norm": 0.7219212055206299, | |
| "learning_rate": 1.666092246697512e-05, | |
| "loss": 2.9175, | |
| "step": 3550 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "grad_norm": 0.7377539873123169, | |
| "learning_rate": 1.6536862531698766e-05, | |
| "loss": 2.841, | |
| "step": 3555 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "grad_norm": 0.7199496626853943, | |
| "learning_rate": 1.6413174627997328e-05, | |
| "loss": 2.8737, | |
| "step": 3560 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "grad_norm": 0.7269471883773804, | |
| "learning_rate": 1.6289860130990147e-05, | |
| "loss": 2.8675, | |
| "step": 3565 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "grad_norm": 0.7616569399833679, | |
| "learning_rate": 1.6166920411645064e-05, | |
| "loss": 2.9519, | |
| "step": 3570 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "grad_norm": 0.8098664283752441, | |
| "learning_rate": 1.6044356836763315e-05, | |
| "loss": 2.9393, | |
| "step": 3575 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "grad_norm": 0.709020733833313, | |
| "learning_rate": 1.5922170768964285e-05, | |
| "loss": 2.9626, | |
| "step": 3580 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "grad_norm": 0.7333164811134338, | |
| "learning_rate": 1.5800363566670362e-05, | |
| "loss": 2.9196, | |
| "step": 3585 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "grad_norm": 0.7305191159248352, | |
| "learning_rate": 1.5678936584091852e-05, | |
| "loss": 2.9066, | |
| "step": 3590 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "grad_norm": 0.7371678948402405, | |
| "learning_rate": 1.5557891171211892e-05, | |
| "loss": 2.8998, | |
| "step": 3595 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "grad_norm": 0.7497947216033936, | |
| "learning_rate": 1.5437228673771465e-05, | |
| "loss": 2.8854, | |
| "step": 3600 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "grad_norm": 0.7395901083946228, | |
| "learning_rate": 1.5316950433254445e-05, | |
| "loss": 2.8826, | |
| "step": 3605 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "grad_norm": 0.7346936464309692, | |
| "learning_rate": 1.5197057786872649e-05, | |
| "loss": 2.8816, | |
| "step": 3610 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "grad_norm": 0.6819217205047607, | |
| "learning_rate": 1.5077552067551015e-05, | |
| "loss": 2.8385, | |
| "step": 3615 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "grad_norm": 0.7093620300292969, | |
| "learning_rate": 1.4958434603912747e-05, | |
| "loss": 2.8778, | |
| "step": 3620 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "grad_norm": 0.7439285516738892, | |
| "learning_rate": 1.4839706720264546e-05, | |
| "loss": 2.8469, | |
| "step": 3625 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "grad_norm": 0.7112381458282471, | |
| "learning_rate": 1.4721369736581924e-05, | |
| "loss": 2.9407, | |
| "step": 3630 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "grad_norm": 0.7157320380210876, | |
| "learning_rate": 1.4603424968494484e-05, | |
| "loss": 2.8217, | |
| "step": 3635 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "grad_norm": 0.759329617023468, | |
| "learning_rate": 1.448587372727132e-05, | |
| "loss": 2.9403, | |
| "step": 3640 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "grad_norm": 0.7280795574188232, | |
| "learning_rate": 1.4368717319806419e-05, | |
| "loss": 2.8757, | |
| "step": 3645 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "grad_norm": 0.7509269714355469, | |
| "learning_rate": 1.4251957048604152e-05, | |
| "loss": 2.9107, | |
| "step": 3650 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "grad_norm": 0.7029860615730286, | |
| "learning_rate": 1.413559421176479e-05, | |
| "loss": 2.9097, | |
| "step": 3655 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "grad_norm": 0.7911568880081177, | |
| "learning_rate": 1.4019630102970056e-05, | |
| "loss": 2.9053, | |
| "step": 3660 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "grad_norm": 0.727408766746521, | |
| "learning_rate": 1.3904066011468753e-05, | |
| "loss": 2.9365, | |
| "step": 3665 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "grad_norm": 0.7638286352157593, | |
| "learning_rate": 1.3788903222062433e-05, | |
| "loss": 2.8833, | |
| "step": 3670 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "grad_norm": 0.7450382113456726, | |
| "learning_rate": 1.3674143015091118e-05, | |
| "loss": 2.9039, | |
| "step": 3675 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "grad_norm": 0.7118229866027832, | |
| "learning_rate": 1.355978666641905e-05, | |
| "loss": 2.8631, | |
| "step": 3680 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "grad_norm": 0.6794108748435974, | |
| "learning_rate": 1.3445835447420507e-05, | |
| "loss": 2.877, | |
| "step": 3685 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "grad_norm": 0.7002469301223755, | |
| "learning_rate": 1.3332290624965688e-05, | |
| "loss": 2.9423, | |
| "step": 3690 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "grad_norm": 0.7321146130561829, | |
| "learning_rate": 1.3219153461406609e-05, | |
| "loss": 2.887, | |
| "step": 3695 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "grad_norm": 0.7479889392852783, | |
| "learning_rate": 1.3106425214563078e-05, | |
| "loss": 2.9111, | |
| "step": 3700 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "grad_norm": 0.7170171737670898, | |
| "learning_rate": 1.2994107137708716e-05, | |
| "loss": 2.7711, | |
| "step": 3705 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "grad_norm": 0.706071674823761, | |
| "learning_rate": 1.2882200479556988e-05, | |
| "loss": 2.8921, | |
| "step": 3710 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "grad_norm": 0.7215095162391663, | |
| "learning_rate": 1.2770706484247397e-05, | |
| "loss": 2.9752, | |
| "step": 3715 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "grad_norm": 0.7218484878540039, | |
| "learning_rate": 1.2659626391331564e-05, | |
| "loss": 2.8129, | |
| "step": 3720 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "grad_norm": 0.7251324653625488, | |
| "learning_rate": 1.2548961435759493e-05, | |
| "loss": 2.9128, | |
| "step": 3725 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "grad_norm": 0.709524929523468, | |
| "learning_rate": 1.2438712847865846e-05, | |
| "loss": 2.865, | |
| "step": 3730 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "grad_norm": 0.7283322215080261, | |
| "learning_rate": 1.2328881853356244e-05, | |
| "loss": 2.9329, | |
| "step": 3735 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "grad_norm": 0.7131749391555786, | |
| "learning_rate": 1.221946967329365e-05, | |
| "loss": 2.969, | |
| "step": 3740 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "grad_norm": 0.7404288649559021, | |
| "learning_rate": 1.2110477524084796e-05, | |
| "loss": 2.9074, | |
| "step": 3745 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "grad_norm": 0.6952566504478455, | |
| "learning_rate": 1.2001906617466657e-05, | |
| "loss": 2.9729, | |
| "step": 3750 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "grad_norm": 0.7236440181732178, | |
| "learning_rate": 1.1893758160492978e-05, | |
| "loss": 2.8736, | |
| "step": 3755 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "grad_norm": 0.7026664018630981, | |
| "learning_rate": 1.1786033355520859e-05, | |
| "loss": 2.8477, | |
| "step": 3760 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "grad_norm": 0.7117552757263184, | |
| "learning_rate": 1.1678733400197373e-05, | |
| "loss": 2.8622, | |
| "step": 3765 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "grad_norm": 0.7323325872421265, | |
| "learning_rate": 1.1571859487446263e-05, | |
| "loss": 2.9097, | |
| "step": 3770 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "grad_norm": 0.7208499312400818, | |
| "learning_rate": 1.1465412805454695e-05, | |
| "loss": 2.9042, | |
| "step": 3775 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "grad_norm": 0.7203373312950134, | |
| "learning_rate": 1.1359394537660011e-05, | |
| "loss": 2.9209, | |
| "step": 3780 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "grad_norm": 0.6808639764785767, | |
| "learning_rate": 1.125380586273661e-05, | |
| "loss": 2.8562, | |
| "step": 3785 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "grad_norm": 0.7083504796028137, | |
| "learning_rate": 1.1148647954582808e-05, | |
| "loss": 2.901, | |
| "step": 3790 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "grad_norm": 0.7643154263496399, | |
| "learning_rate": 1.1043921982307819e-05, | |
| "loss": 2.8947, | |
| "step": 3795 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "grad_norm": 0.714851438999176, | |
| "learning_rate": 1.0939629110218735e-05, | |
| "loss": 2.9259, | |
| "step": 3800 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "grad_norm": 0.7194553017616272, | |
| "learning_rate": 1.0835770497807596e-05, | |
| "loss": 2.8657, | |
| "step": 3805 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "grad_norm": 0.7192110419273376, | |
| "learning_rate": 1.0732347299738493e-05, | |
| "loss": 2.8196, | |
| "step": 3810 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "grad_norm": 0.735783040523529, | |
| "learning_rate": 1.0629360665834732e-05, | |
| "loss": 2.9234, | |
| "step": 3815 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "grad_norm": 0.7472171187400818, | |
| "learning_rate": 1.052681174106604e-05, | |
| "loss": 2.9279, | |
| "step": 3820 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "grad_norm": 0.7456896901130676, | |
| "learning_rate": 1.0424701665535852e-05, | |
| "loss": 2.9278, | |
| "step": 3825 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "grad_norm": 0.7269583940505981, | |
| "learning_rate": 1.0323031574468638e-05, | |
| "loss": 2.8864, | |
| "step": 3830 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "grad_norm": 0.7369275689125061, | |
| "learning_rate": 1.0221802598197261e-05, | |
| "loss": 2.8441, | |
| "step": 3835 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "grad_norm": 0.6950053572654724, | |
| "learning_rate": 1.0121015862150423e-05, | |
| "loss": 2.8309, | |
| "step": 3840 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "grad_norm": 0.7089066505432129, | |
| "learning_rate": 1.0020672486840154e-05, | |
| "loss": 2.9491, | |
| "step": 3845 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "grad_norm": 0.6992034316062927, | |
| "learning_rate": 9.920773587849364e-06, | |
| "loss": 2.865, | |
| "step": 3850 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "grad_norm": 0.7451211214065552, | |
| "learning_rate": 9.821320275819401e-06, | |
| "loss": 2.9658, | |
| "step": 3855 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "grad_norm": 0.7073239684104919, | |
| "learning_rate": 9.72231365643777e-06, | |
| "loss": 2.8564, | |
| "step": 3860 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "grad_norm": 0.7056171894073486, | |
| "learning_rate": 9.623754830425779e-06, | |
| "loss": 2.8252, | |
| "step": 3865 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "grad_norm": 0.7366864085197449, | |
| "learning_rate": 9.52564489352632e-06, | |
| "loss": 2.8905, | |
| "step": 3870 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "grad_norm": 0.712018609046936, | |
| "learning_rate": 9.427984936491702e-06, | |
| "loss": 2.9405, | |
| "step": 3875 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "grad_norm": 0.7274293303489685, | |
| "learning_rate": 9.330776045071509e-06, | |
| "loss": 2.9513, | |
| "step": 3880 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "grad_norm": 0.709893524646759, | |
| "learning_rate": 9.23401930000054e-06, | |
| "loss": 2.9173, | |
| "step": 3885 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "grad_norm": 0.7511017918586731, | |
| "learning_rate": 9.137715776986772e-06, | |
| "loss": 2.9102, | |
| "step": 3890 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "grad_norm": 0.7051652669906616, | |
| "learning_rate": 9.041866546699434e-06, | |
| "loss": 2.9385, | |
| "step": 3895 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "grad_norm": 0.7151492238044739, | |
| "learning_rate": 8.946472674757078e-06, | |
| "loss": 2.9219, | |
| "step": 3900 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "grad_norm": 0.7474796772003174, | |
| "learning_rate": 8.851535221715735e-06, | |
| "loss": 2.8658, | |
| "step": 3905 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "grad_norm": 0.6855131387710571, | |
| "learning_rate": 8.757055243057132e-06, | |
| "loss": 2.9281, | |
| "step": 3910 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "grad_norm": 0.7005223631858826, | |
| "learning_rate": 8.663033789176967e-06, | |
| "loss": 2.9037, | |
| "step": 3915 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "grad_norm": 0.7284826636314392, | |
| "learning_rate": 8.5694719053732e-06, | |
| "loss": 2.9408, | |
| "step": 3920 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "grad_norm": 0.7069577574729919, | |
| "learning_rate": 8.476370631834458e-06, | |
| "loss": 2.9111, | |
| "step": 3925 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "grad_norm": 0.7095098495483398, | |
| "learning_rate": 8.383731003628452e-06, | |
| "loss": 2.9141, | |
| "step": 3930 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "grad_norm": 0.7137646079063416, | |
| "learning_rate": 8.291554050690508e-06, | |
| "loss": 2.9036, | |
| "step": 3935 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "grad_norm": 0.7282265424728394, | |
| "learning_rate": 8.199840797812058e-06, | |
| "loss": 2.8683, | |
| "step": 3940 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "grad_norm": 0.7325572967529297, | |
| "learning_rate": 8.108592264629295e-06, | |
| "loss": 2.9627, | |
| "step": 3945 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "grad_norm": 0.7181254625320435, | |
| "learning_rate": 8.017809465611803e-06, | |
| "loss": 2.7934, | |
| "step": 3950 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "grad_norm": 0.7210270166397095, | |
| "learning_rate": 7.927493410051324e-06, | |
| "loss": 2.8468, | |
| "step": 3955 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "grad_norm": 0.6905809044837952, | |
| "learning_rate": 7.837645102050473e-06, | |
| "loss": 2.8476, | |
| "step": 3960 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "grad_norm": 0.7088523507118225, | |
| "learning_rate": 7.748265540511635e-06, | |
| "loss": 2.8992, | |
| "step": 3965 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "grad_norm": 0.6808198690414429, | |
| "learning_rate": 7.65935571912582e-06, | |
| "loss": 2.8295, | |
| "step": 3970 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "grad_norm": 0.710476815700531, | |
| "learning_rate": 7.5709166263616405e-06, | |
| "loss": 2.849, | |
| "step": 3975 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "grad_norm": 0.7097440958023071, | |
| "learning_rate": 7.482949245454302e-06, | |
| "loss": 2.8524, | |
| "step": 3980 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "grad_norm": 0.7230978608131409, | |
| "learning_rate": 7.3954545543946876e-06, | |
| "loss": 2.8118, | |
| "step": 3985 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "grad_norm": 0.6982550621032715, | |
| "learning_rate": 7.308433525918468e-06, | |
| "loss": 2.8693, | |
| "step": 3990 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "grad_norm": 0.712472677230835, | |
| "learning_rate": 7.221887127495313e-06, | |
| "loss": 2.8415, | |
| "step": 3995 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "grad_norm": 0.7155121564865112, | |
| "learning_rate": 7.1358163213181114e-06, | |
| "loss": 2.9374, | |
| "step": 4000 | |
| } | |
| ], | |
| "logging_steps": 5, | |
| "max_steps": 4811, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 1, | |
| "save_steps": 1000, | |
| "total_flos": 1.0865380348133376e+19, | |
| "train_batch_size": 16, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
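
The object above follows the standard `trainer_state.json` layout written by the Hugging Face `Trainer` at each checkpoint: a `log_history` list of per-step records plus run-level metadata (`max_steps`, `logging_steps`, `train_batch_size`, and so on). A minimal sketch for turning it into loss and learning-rate curves — the file path and output name are assumptions, and only the keys visible above (`log_history`, `step`, `loss`, `learning_rate`) are relied on:

```python
import json

import matplotlib.pyplot as plt

# Load the checkpoint state (path is an assumption; point it at your run's
# checkpoint directory, e.g. checkpoint-4000/trainer_state.json).
with open("trainer_state.json") as f:
    state = json.load(f)

# Keep only training records that carry both a loss and a learning rate;
# other record types (e.g. eval logs) may omit one of the keys.
entries = [e for e in state["log_history"]
           if "loss" in e and "learning_rate" in e]

steps = [e["step"] for e in entries]
losses = [e["loss"] for e in entries]
lrs = [e["learning_rate"] for e in entries]

fig, ax_loss = plt.subplots()
ax_loss.plot(steps, losses, label="train loss")
ax_loss.set_xlabel("step")
ax_loss.set_ylabel("loss")

# Second y-axis so the LR schedule (orders of magnitude smaller than the
# loss) stays readable on the same plot.
ax_lr = ax_loss.twinx()
ax_lr.plot(steps, lrs, color="tab:orange", label="learning rate")
ax_lr.set_ylabel("learning rate")

fig.tight_layout()
plt.savefig("loss_curve.png")  # output name is an assumption
```

Filtering on both keys at once keeps `steps`, `losses`, and `lrs` aligned even if the log ever interleaves records that lack one of the fields.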