{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.0,
  "eval_steps": 500,
  "global_step": 2226,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.013490725126475547,
      "grad_norm": 65.45332336425781,
      "learning_rate": 0.00019964061096136568,
      "loss": 22.1843,
      "step": 10
    },
    {
      "epoch": 0.026981450252951095,
      "grad_norm": 6.102770805358887,
      "learning_rate": 0.00019874213836477988,
      "loss": 7.6527,
      "step": 20
    },
    {
      "epoch": 0.04047217537942664,
      "grad_norm": 2.982379913330078,
      "learning_rate": 0.00019784366576819408,
      "loss": 3.4532,
      "step": 30
    },
    {
      "epoch": 0.05396290050590219,
      "grad_norm": 3.4628562927246094,
      "learning_rate": 0.00019694519317160826,
      "loss": 2.4467,
      "step": 40
    },
    {
      "epoch": 0.06745362563237774,
      "grad_norm": 3.0008349418640137,
      "learning_rate": 0.00019604672057502246,
      "loss": 1.6481,
      "step": 50
    },
    {
      "epoch": 0.08094435075885328,
      "grad_norm": 1.767865777015686,
      "learning_rate": 0.00019514824797843666,
      "loss": 1.2119,
      "step": 60
    },
    {
      "epoch": 0.09443507588532883,
      "grad_norm": 3.416968584060669,
      "learning_rate": 0.00019424977538185087,
      "loss": 0.967,
      "step": 70
    },
    {
      "epoch": 0.10792580101180438,
      "grad_norm": 3.718262195587158,
      "learning_rate": 0.00019335130278526507,
      "loss": 0.8783,
      "step": 80
    },
    {
      "epoch": 0.12141652613827993,
      "grad_norm": 3.689655065536499,
      "learning_rate": 0.00019245283018867927,
      "loss": 0.7619,
      "step": 90
    },
    {
      "epoch": 0.13490725126475547,
      "grad_norm": 6.728384017944336,
      "learning_rate": 0.00019155435759209347,
      "loss": 0.7244,
      "step": 100
    },
    {
      "epoch": 0.14839797639123103,
      "grad_norm": 4.880130290985107,
      "learning_rate": 0.00019065588499550765,
      "loss": 0.6931,
      "step": 110
    },
    {
      "epoch": 0.16188870151770657,
      "grad_norm": 10.14767074584961,
      "learning_rate": 0.00018975741239892185,
      "loss": 0.6995,
      "step": 120
    },
    {
      "epoch": 0.17537942664418213,
      "grad_norm": 10.897403717041016,
      "learning_rate": 0.00018885893980233605,
      "loss": 0.69,
      "step": 130
    },
    {
      "epoch": 0.18887015177065766,
      "grad_norm": 7.759915351867676,
      "learning_rate": 0.00018796046720575023,
      "loss": 0.6899,
      "step": 140
    },
    {
      "epoch": 0.20236087689713322,
      "grad_norm": 7.006256580352783,
      "learning_rate": 0.00018706199460916443,
      "loss": 0.6739,
      "step": 150
    },
    {
      "epoch": 0.21585160202360876,
      "grad_norm": 5.777213096618652,
      "learning_rate": 0.00018616352201257863,
      "loss": 0.6731,
      "step": 160
    },
    {
      "epoch": 0.22934232715008432,
      "grad_norm": 10.224111557006836,
      "learning_rate": 0.00018526504941599283,
      "loss": 0.6417,
      "step": 170
    },
    {
      "epoch": 0.24283305227655985,
      "grad_norm": 6.332294464111328,
      "learning_rate": 0.000184366576819407,
      "loss": 0.6452,
      "step": 180
    },
    {
      "epoch": 0.2563237774030354,
      "grad_norm": 3.9472005367279053,
      "learning_rate": 0.0001834681042228212,
      "loss": 0.6271,
      "step": 190
    },
    {
      "epoch": 0.26981450252951095,
      "grad_norm": 6.640835762023926,
      "learning_rate": 0.0001825696316262354,
      "loss": 0.6161,
      "step": 200
    },
    {
      "epoch": 0.28330522765598654,
      "grad_norm": 5.6504316329956055,
      "learning_rate": 0.00018167115902964959,
      "loss": 0.6452,
      "step": 210
    },
    {
      "epoch": 0.29679595278246207,
      "grad_norm": 3.4219472408294678,
      "learning_rate": 0.0001807726864330638,
      "loss": 0.6228,
      "step": 220
    },
    {
      "epoch": 0.3102866779089376,
      "grad_norm": 7.479050636291504,
      "learning_rate": 0.000179874213836478,
      "loss": 0.6035,
      "step": 230
    },
    {
      "epoch": 0.32377740303541314,
      "grad_norm": 6.84128475189209,
      "learning_rate": 0.0001789757412398922,
      "loss": 0.5763,
      "step": 240
    },
    {
      "epoch": 0.3372681281618887,
      "grad_norm": 3.407183885574341,
      "learning_rate": 0.0001780772686433064,
      "loss": 0.5981,
      "step": 250
    },
    {
      "epoch": 0.35075885328836426,
      "grad_norm": 6.634527683258057,
      "learning_rate": 0.0001771787960467206,
      "loss": 0.6313,
      "step": 260
    },
    {
      "epoch": 0.3642495784148398,
      "grad_norm": 6.274175643920898,
      "learning_rate": 0.0001762803234501348,
      "loss": 0.5971,
      "step": 270
    },
    {
      "epoch": 0.3777403035413153,
      "grad_norm": 5.464880466461182,
      "learning_rate": 0.00017538185085354897,
      "loss": 0.629,
      "step": 280
    },
    {
      "epoch": 0.3912310286677909,
      "grad_norm": 7.9421234130859375,
      "learning_rate": 0.00017448337825696318,
      "loss": 0.5604,
      "step": 290
    },
    {
      "epoch": 0.40472175379426645,
      "grad_norm": 6.513714790344238,
      "learning_rate": 0.00017358490566037738,
      "loss": 0.5442,
      "step": 300
    },
    {
      "epoch": 0.418212478920742,
      "grad_norm": 4.856393337249756,
      "learning_rate": 0.00017268643306379155,
      "loss": 0.5444,
      "step": 310
    },
    {
      "epoch": 0.4317032040472175,
      "grad_norm": 7.252712249755859,
      "learning_rate": 0.00017178796046720576,
      "loss": 0.6071,
      "step": 320
    },
    {
      "epoch": 0.4451939291736931,
      "grad_norm": 6.8953328132629395,
      "learning_rate": 0.00017088948787061996,
      "loss": 0.5622,
      "step": 330
    },
    {
      "epoch": 0.45868465430016864,
      "grad_norm": 6.220505237579346,
      "learning_rate": 0.00016999101527403416,
      "loss": 0.6213,
      "step": 340
    },
    {
      "epoch": 0.47217537942664417,
      "grad_norm": 1.9960452318191528,
      "learning_rate": 0.00016909254267744833,
      "loss": 0.5752,
      "step": 350
    },
    {
      "epoch": 0.4856661045531197,
      "grad_norm": 4.8189778327941895,
      "learning_rate": 0.00016819407008086254,
      "loss": 0.5695,
      "step": 360
    },
    {
      "epoch": 0.4991568296795953,
      "grad_norm": 2.157482624053955,
      "learning_rate": 0.00016729559748427674,
      "loss": 0.5818,
      "step": 370
    },
    {
      "epoch": 0.5126475548060708,
      "grad_norm": 6.034278392791748,
      "learning_rate": 0.00016639712488769091,
      "loss": 0.5632,
      "step": 380
    },
    {
      "epoch": 0.5261382799325464,
      "grad_norm": 3.6643030643463135,
      "learning_rate": 0.00016549865229110512,
      "loss": 0.5149,
      "step": 390
    },
    {
      "epoch": 0.5396290050590219,
      "grad_norm": 3.4874682426452637,
      "learning_rate": 0.00016460017969451932,
      "loss": 0.5712,
      "step": 400
    },
    {
      "epoch": 0.5531197301854974,
      "grad_norm": 5.94505500793457,
      "learning_rate": 0.00016370170709793352,
      "loss": 0.5579,
      "step": 410
    },
    {
      "epoch": 0.5666104553119731,
      "grad_norm": 2.6858978271484375,
      "learning_rate": 0.00016280323450134772,
      "loss": 0.5601,
      "step": 420
    },
    {
      "epoch": 0.5801011804384486,
      "grad_norm": 0.5919632315635681,
      "learning_rate": 0.00016190476190476192,
      "loss": 0.5122,
      "step": 430
    },
    {
      "epoch": 0.5935919055649241,
      "grad_norm": 9.723355293273926,
      "learning_rate": 0.00016100628930817613,
      "loss": 0.5408,
      "step": 440
    },
    {
      "epoch": 0.6070826306913997,
      "grad_norm": 3.421534776687622,
      "learning_rate": 0.0001601078167115903,
      "loss": 0.5125,
      "step": 450
    },
    {
      "epoch": 0.6205733558178752,
      "grad_norm": 1.350819706916809,
      "learning_rate": 0.0001592093441150045,
      "loss": 0.5443,
      "step": 460
    },
    {
      "epoch": 0.6340640809443507,
      "grad_norm": 2.9618473052978516,
      "learning_rate": 0.0001583108715184187,
      "loss": 0.5792,
      "step": 470
    },
    {
      "epoch": 0.6475548060708263,
      "grad_norm": 11.16501522064209,
      "learning_rate": 0.00015741239892183288,
      "loss": 0.5499,
      "step": 480
    },
    {
      "epoch": 0.6610455311973018,
      "grad_norm": 2.7122786045074463,
      "learning_rate": 0.00015651392632524708,
      "loss": 0.589,
      "step": 490
    },
    {
      "epoch": 0.6745362563237775,
      "grad_norm": 6.2460408210754395,
      "learning_rate": 0.00015561545372866128,
      "loss": 0.5422,
      "step": 500
    },
    {
      "epoch": 0.688026981450253,
      "grad_norm": 12.943184852600098,
      "learning_rate": 0.0001547169811320755,
      "loss": 0.5431,
      "step": 510
    },
    {
      "epoch": 0.7015177065767285,
      "grad_norm": 2.4169883728027344,
      "learning_rate": 0.00015381850853548966,
      "loss": 0.4984,
      "step": 520
    },
    {
      "epoch": 0.715008431703204,
      "grad_norm": 5.108154773712158,
      "learning_rate": 0.00015292003593890386,
      "loss": 0.4795,
      "step": 530
    },
    {
      "epoch": 0.7284991568296796,
      "grad_norm": 3.268669605255127,
      "learning_rate": 0.00015202156334231807,
      "loss": 0.4961,
      "step": 540
    },
    {
      "epoch": 0.7419898819561551,
      "grad_norm": 1.7926982641220093,
      "learning_rate": 0.00015112309074573224,
      "loss": 0.5339,
      "step": 550
    },
    {
      "epoch": 0.7554806070826307,
      "grad_norm": 3.6382663249969482,
      "learning_rate": 0.00015022461814914644,
      "loss": 0.4855,
      "step": 560
    },
    {
      "epoch": 0.7689713322091062,
      "grad_norm": 8.025957107543945,
      "learning_rate": 0.00014932614555256067,
      "loss": 0.4901,
      "step": 570
    },
    {
      "epoch": 0.7824620573355818,
      "grad_norm": 4.220882892608643,
      "learning_rate": 0.00014842767295597485,
      "loss": 0.5448,
      "step": 580
    },
    {
      "epoch": 0.7959527824620574,
      "grad_norm": 6.42817497253418,
      "learning_rate": 0.00014752920035938905,
      "loss": 0.4969,
      "step": 590
    },
    {
      "epoch": 0.8094435075885329,
      "grad_norm": 4.182610988616943,
      "learning_rate": 0.00014663072776280325,
      "loss": 0.4863,
      "step": 600
    },
    {
      "epoch": 0.8229342327150084,
      "grad_norm": 3.687934637069702,
      "learning_rate": 0.00014573225516621745,
      "loss": 0.5162,
      "step": 610
    },
    {
      "epoch": 0.836424957841484,
      "grad_norm": 3.8240537643432617,
      "learning_rate": 0.00014483378256963163,
      "loss": 0.5089,
      "step": 620
    },
    {
      "epoch": 0.8499156829679595,
      "grad_norm": 3.3823423385620117,
      "learning_rate": 0.00014393530997304583,
      "loss": 0.5447,
      "step": 630
    },
    {
      "epoch": 0.863406408094435,
      "grad_norm": 4.591256618499756,
      "learning_rate": 0.00014303683737646003,
      "loss": 0.5067,
      "step": 640
    },
    {
      "epoch": 0.8768971332209107,
      "grad_norm": 4.414494514465332,
      "learning_rate": 0.0001421383647798742,
      "loss": 0.4858,
      "step": 650
    },
    {
      "epoch": 0.8903878583473862,
      "grad_norm": 3.4649970531463623,
      "learning_rate": 0.0001412398921832884,
      "loss": 0.5243,
      "step": 660
    },
    {
      "epoch": 0.9038785834738617,
      "grad_norm": 4.813696384429932,
      "learning_rate": 0.0001403414195867026,
      "loss": 0.51,
      "step": 670
    },
    {
      "epoch": 0.9173693086003373,
      "grad_norm": 4.597005844116211,
      "learning_rate": 0.00013944294699011681,
      "loss": 0.5079,
      "step": 680
    },
    {
      "epoch": 0.9308600337268128,
      "grad_norm": 4.291003704071045,
      "learning_rate": 0.000138544474393531,
      "loss": 0.4966,
      "step": 690
    },
    {
      "epoch": 0.9443507588532883,
      "grad_norm": 3.8949058055877686,
      "learning_rate": 0.0001376460017969452,
      "loss": 0.5086,
      "step": 700
    },
    {
      "epoch": 0.9578414839797639,
      "grad_norm": 4.685634613037109,
      "learning_rate": 0.0001367475292003594,
      "loss": 0.5005,
      "step": 710
    },
    {
      "epoch": 0.9713322091062394,
      "grad_norm": 3.9277801513671875,
      "learning_rate": 0.0001358490566037736,
      "loss": 0.4759,
      "step": 720
    },
    {
      "epoch": 0.984822934232715,
      "grad_norm": 4.3011345863342285,
      "learning_rate": 0.0001349505840071878,
      "loss": 0.4371,
      "step": 730
    },
    {
      "epoch": 0.9983136593591906,
      "grad_norm": 3.370706796646118,
      "learning_rate": 0.000134052111410602,
      "loss": 0.4811,
      "step": 740
    },
    {
      "epoch": 1.0107925801011803,
      "grad_norm": 5.607315540313721,
      "learning_rate": 0.00013315363881401617,
      "loss": 0.4648,
      "step": 750
    },
    {
      "epoch": 1.024283305227656,
      "grad_norm": 3.9047558307647705,
      "learning_rate": 0.00013225516621743038,
      "loss": 0.4813,
      "step": 760
    },
    {
      "epoch": 1.0377740303541316,
      "grad_norm": 3.686000108718872,
      "learning_rate": 0.00013135669362084458,
      "loss": 0.5189,
      "step": 770
    },
    {
      "epoch": 1.0512647554806072,
      "grad_norm": 3.8528940677642822,
      "learning_rate": 0.00013045822102425878,
      "loss": 0.4761,
      "step": 780
    },
    {
      "epoch": 1.0647554806070827,
      "grad_norm": 3.5714054107666016,
      "learning_rate": 0.00012955974842767296,
      "loss": 0.4845,
      "step": 790
    },
    {
      "epoch": 1.0782462057335582,
      "grad_norm": 3.3482840061187744,
      "learning_rate": 0.00012866127583108716,
      "loss": 0.487,
      "step": 800
    },
    {
      "epoch": 1.0917369308600338,
      "grad_norm": 4.3598127365112305,
      "learning_rate": 0.00012776280323450136,
      "loss": 0.4918,
      "step": 810
    },
    {
      "epoch": 1.1052276559865093,
      "grad_norm": 3.4356210231781006,
      "learning_rate": 0.00012686433063791554,
      "loss": 0.5042,
      "step": 820
    },
    {
      "epoch": 1.1187183811129848,
      "grad_norm": 3.015895366668701,
      "learning_rate": 0.00012596585804132974,
      "loss": 0.4911,
      "step": 830
    },
    {
      "epoch": 1.1322091062394604,
      "grad_norm": 3.4911601543426514,
      "learning_rate": 0.00012506738544474394,
      "loss": 0.507,
      "step": 840
    },
    {
      "epoch": 1.145699831365936,
      "grad_norm": 5.718015670776367,
      "learning_rate": 0.00012416891284815814,
      "loss": 0.4668,
      "step": 850
    },
    {
      "epoch": 1.1591905564924114,
      "grad_norm": 4.448844909667969,
      "learning_rate": 0.00012327044025157232,
      "loss": 0.465,
      "step": 860
    },
    {
      "epoch": 1.172681281618887,
      "grad_norm": 3.243500232696533,
      "learning_rate": 0.00012237196765498652,
      "loss": 0.4922,
      "step": 870
    },
    {
      "epoch": 1.1861720067453625,
      "grad_norm": 3.25435471534729,
      "learning_rate": 0.00012147349505840073,
      "loss": 0.4825,
      "step": 880
    },
    {
      "epoch": 1.199662731871838,
      "grad_norm": 3.0708346366882324,
      "learning_rate": 0.00012057502246181491,
      "loss": 0.4777,
      "step": 890
    },
    {
      "epoch": 1.2131534569983136,
      "grad_norm": 3.274693727493286,
      "learning_rate": 0.00011967654986522911,
      "loss": 0.4727,
      "step": 900
    },
    {
      "epoch": 1.2266441821247893,
      "grad_norm": 5.614912986755371,
      "learning_rate": 0.00011877807726864331,
      "loss": 0.4914,
      "step": 910
    },
    {
      "epoch": 1.2401349072512649,
      "grad_norm": 3.7809228897094727,
      "learning_rate": 0.0001178796046720575,
      "loss": 0.4727,
      "step": 920
    },
    {
      "epoch": 1.2536256323777404,
      "grad_norm": 3.46541428565979,
      "learning_rate": 0.0001169811320754717,
      "loss": 0.4799,
      "step": 930
    },
    {
      "epoch": 1.267116357504216,
      "grad_norm": 3.8525471687316895,
      "learning_rate": 0.0001160826594788859,
      "loss": 0.5079,
      "step": 940
    },
    {
      "epoch": 1.2806070826306915,
      "grad_norm": 3.959237575531006,
      "learning_rate": 0.00011518418688230011,
      "loss": 0.4689,
      "step": 950
    },
    {
      "epoch": 1.294097807757167,
      "grad_norm": 3.873572826385498,
      "learning_rate": 0.00011428571428571428,
      "loss": 0.4688,
      "step": 960
    },
    {
      "epoch": 1.3075885328836425,
      "grad_norm": 3.813100814819336,
      "learning_rate": 0.00011338724168912849,
      "loss": 0.4762,
      "step": 970
    },
    {
      "epoch": 1.321079258010118,
      "grad_norm": 3.517953634262085,
      "learning_rate": 0.00011248876909254269,
      "loss": 0.4595,
      "step": 980
    },
    {
      "epoch": 1.3345699831365936,
      "grad_norm": 3.5959513187408447,
      "learning_rate": 0.00011159029649595688,
      "loss": 0.4959,
      "step": 990
    },
    {
      "epoch": 1.3480607082630691,
      "grad_norm": 3.9473278522491455,
      "learning_rate": 0.00011069182389937108,
      "loss": 0.4552,
      "step": 1000
    },
    {
      "epoch": 1.3615514333895447,
      "grad_norm": 3.5634729862213135,
      "learning_rate": 0.00010979335130278528,
      "loss": 0.4765,
      "step": 1010
    },
    {
      "epoch": 1.3750421585160202,
      "grad_norm": 3.88102126121521,
      "learning_rate": 0.00010889487870619948,
      "loss": 0.4282,
      "step": 1020
    },
    {
      "epoch": 1.3885328836424957,
      "grad_norm": 4.591094970703125,
      "learning_rate": 0.00010799640610961366,
      "loss": 0.483,
      "step": 1030
    },
    {
      "epoch": 1.4020236087689713,
      "grad_norm": 2.726210355758667,
      "learning_rate": 0.00010709793351302786,
      "loss": 0.5491,
      "step": 1040
    },
    {
      "epoch": 1.4155143338954468,
      "grad_norm": 2.8899638652801514,
      "learning_rate": 0.00010619946091644206,
      "loss": 0.4494,
      "step": 1050
    },
    {
      "epoch": 1.4290050590219225,
      "grad_norm": 3.1624226570129395,
      "learning_rate": 0.00010530098831985624,
      "loss": 0.4861,
      "step": 1060
    },
    {
      "epoch": 1.4424957841483979,
      "grad_norm": 4.227782726287842,
      "learning_rate": 0.00010440251572327044,
      "loss": 0.4771,
      "step": 1070
    },
    {
      "epoch": 1.4559865092748736,
      "grad_norm": 3.1170289516448975,
      "learning_rate": 0.00010350404312668464,
      "loss": 0.4608,
      "step": 1080
    },
    {
      "epoch": 1.4694772344013491,
      "grad_norm": 3.7711753845214844,
      "learning_rate": 0.00010260557053009883,
      "loss": 0.4553,
      "step": 1090
    },
    {
      "epoch": 1.4829679595278247,
      "grad_norm": 3.723172187805176,
      "learning_rate": 0.00010170709793351303,
      "loss": 0.4347,
      "step": 1100
    },
    {
      "epoch": 1.4964586846543002,
      "grad_norm": 3.469057083129883,
      "learning_rate": 0.00010080862533692723,
      "loss": 0.4747,
      "step": 1110
    },
    {
      "epoch": 1.5099494097807757,
      "grad_norm": 3.3788669109344482,
      "learning_rate": 9.991015274034142e-05,
      "loss": 0.4788,
      "step": 1120
    },
    {
      "epoch": 1.5234401349072513,
      "grad_norm": 5.321389198303223,
      "learning_rate": 9.901168014375562e-05,
      "loss": 0.4424,
      "step": 1130
    },
    {
      "epoch": 1.5369308600337268,
      "grad_norm": 3.616950511932373,
      "learning_rate": 9.811320754716981e-05,
      "loss": 0.4765,
      "step": 1140
    },
    {
      "epoch": 1.5504215851602023,
      "grad_norm": 6.907515525817871,
      "learning_rate": 9.7214734950584e-05,
      "loss": 0.4797,
      "step": 1150
    },
    {
      "epoch": 1.5639123102866779,
      "grad_norm": 17.670085906982422,
      "learning_rate": 9.63162623539982e-05,
      "loss": 0.4531,
      "step": 1160
    },
    {
      "epoch": 1.5774030354131534,
      "grad_norm": 2.698268413543701,
      "learning_rate": 9.54177897574124e-05,
      "loss": 0.493,
      "step": 1170
    },
    {
      "epoch": 1.590893760539629,
      "grad_norm": 6.46640682220459,
      "learning_rate": 9.451931716082661e-05,
      "loss": 0.4616,
      "step": 1180
    },
    {
      "epoch": 1.6043844856661047,
      "grad_norm": 2.7934491634368896,
      "learning_rate": 9.36208445642408e-05,
      "loss": 0.4819,
      "step": 1190
    },
    {
      "epoch": 1.61787521079258,
      "grad_norm": 4.599141597747803,
      "learning_rate": 9.272237196765498e-05,
      "loss": 0.4506,
      "step": 1200
    },
    {
      "epoch": 1.6313659359190558,
      "grad_norm": 3.3172645568847656,
      "learning_rate": 9.182389937106919e-05,
      "loss": 0.477,
      "step": 1210
    },
    {
      "epoch": 1.644856661045531,
      "grad_norm": 3.431093215942383,
      "learning_rate": 9.092542677448338e-05,
      "loss": 0.4594,
      "step": 1220
    },
    {
      "epoch": 1.6583473861720068,
      "grad_norm": 3.2188451290130615,
      "learning_rate": 9.002695417789758e-05,
      "loss": 0.4311,
      "step": 1230
    },
    {
      "epoch": 1.6718381112984821,
      "grad_norm": 4.311905384063721,
      "learning_rate": 8.912848158131178e-05,
      "loss": 0.4608,
      "step": 1240
    },
    {
      "epoch": 1.685328836424958,
      "grad_norm": 2.9855377674102783,
      "learning_rate": 8.823000898472597e-05,
      "loss": 0.5016,
      "step": 1250
    },
    {
      "epoch": 1.6988195615514334,
      "grad_norm": 4.071963310241699,
      "learning_rate": 8.733153638814017e-05,
      "loss": 0.4684,
      "step": 1260
    },
    {
      "epoch": 1.712310286677909,
      "grad_norm": 3.417339563369751,
      "learning_rate": 8.643306379155436e-05,
      "loss": 0.4364,
      "step": 1270
    },
    {
      "epoch": 1.7258010118043845,
      "grad_norm": 3.9395434856414795,
      "learning_rate": 8.553459119496856e-05,
      "loss": 0.5122,
      "step": 1280
    },
    {
      "epoch": 1.73929173693086,
      "grad_norm": 2.934011220932007,
      "learning_rate": 8.463611859838275e-05,
      "loss": 0.4572,
      "step": 1290
    },
    {
      "epoch": 1.7527824620573356,
      "grad_norm": 3.452453851699829,
      "learning_rate": 8.373764600179695e-05,
      "loss": 0.4674,
      "step": 1300
    },
    {
      "epoch": 1.766273187183811,
      "grad_norm": 3.5324788093566895,
      "learning_rate": 8.283917340521114e-05,
      "loss": 0.4678,
      "step": 1310
    },
    {
      "epoch": 1.7797639123102866,
      "grad_norm": 3.4225003719329834,
      "learning_rate": 8.194070080862534e-05,
      "loss": 0.5027,
      "step": 1320
    },
    {
      "epoch": 1.7932546374367622,
      "grad_norm": 1.9840672016143799,
      "learning_rate": 8.104222821203954e-05,
      "loss": 0.4977,
      "step": 1330
    },
    {
      "epoch": 1.806745362563238,
      "grad_norm": 2.443516492843628,
      "learning_rate": 8.014375561545373e-05,
      "loss": 0.455,
      "step": 1340
    },
    {
      "epoch": 1.8202360876897132,
      "grad_norm": 2.689674139022827,
      "learning_rate": 7.924528301886794e-05,
      "loss": 0.4665,
      "step": 1350
    },
    {
      "epoch": 1.833726812816189,
      "grad_norm": 3.2184505462646484,
      "learning_rate": 7.834681042228212e-05,
      "loss": 0.475,
      "step": 1360
    },
    {
      "epoch": 1.8472175379426643,
      "grad_norm": 3.5544068813323975,
      "learning_rate": 7.744833782569631e-05,
      "loss": 0.4921,
      "step": 1370
    },
    {
      "epoch": 1.86070826306914,
      "grad_norm": 3.2145285606384277,
      "learning_rate": 7.654986522911051e-05,
      "loss": 0.4825,
      "step": 1380
    },
    {
      "epoch": 1.8741989881956154,
      "grad_norm": 3.448352813720703,
      "learning_rate": 7.56513926325247e-05,
      "loss": 0.4442,
      "step": 1390
    },
    {
      "epoch": 1.8876897133220911,
      "grad_norm": 3.329658269882202,
      "learning_rate": 7.47529200359389e-05,
      "loss": 0.4498,
      "step": 1400
    },
    {
      "epoch": 1.9011804384485667,
      "grad_norm": 2.8206887245178223,
      "learning_rate": 7.385444743935311e-05,
      "loss": 0.4525,
      "step": 1410
    },
    {
      "epoch": 1.9146711635750422,
      "grad_norm": 3.067368507385254,
      "learning_rate": 7.29559748427673e-05,
      "loss": 0.4652,
      "step": 1420
    },
    {
      "epoch": 1.9281618887015177,
      "grad_norm": 2.902029275894165,
      "learning_rate": 7.20575022461815e-05,
      "loss": 0.462,
      "step": 1430
    },
    {
      "epoch": 1.9416526138279933,
      "grad_norm": 2.9392433166503906,
      "learning_rate": 7.115902964959569e-05,
      "loss": 0.4505,
      "step": 1440
    },
    {
      "epoch": 1.9551433389544688,
      "grad_norm": 3.328568935394287,
      "learning_rate": 7.026055705300989e-05,
      "loss": 0.4688,
      "step": 1450
    },
    {
      "epoch": 1.9686340640809443,
      "grad_norm": 3.0431363582611084,
      "learning_rate": 6.936208445642408e-05,
      "loss": 0.4749,
      "step": 1460
    },
    {
      "epoch": 1.9821247892074199,
      "grad_norm": 3.3194100856781006,
      "learning_rate": 6.846361185983828e-05,
      "loss": 0.4364,
      "step": 1470
    },
    {
      "epoch": 1.9956155143338954,
      "grad_norm": 3.563108205795288,
      "learning_rate": 6.756513926325248e-05,
      "loss": 0.4635,
      "step": 1480
    },
    {
      "epoch": 2.0080944350758854,
      "grad_norm": 2.6294658184051514,
      "learning_rate": 6.666666666666667e-05,
      "loss": 0.4253,
      "step": 1490
    },
    {
      "epoch": 2.0215851602023607,
      "grad_norm": 2.6223340034484863,
      "learning_rate": 6.576819407008087e-05,
      "loss": 0.4534,
      "step": 1500
    },
    {
      "epoch": 2.0350758853288364,
      "grad_norm": 2.769744396209717,
      "learning_rate": 6.486972147349506e-05,
      "loss": 0.4168,
      "step": 1510
    },
    {
      "epoch": 2.048566610455312,
      "grad_norm": 2.9616827964782715,
      "learning_rate": 6.397124887690926e-05,
      "loss": 0.4464,
      "step": 1520
    },
    {
      "epoch": 2.0620573355817875,
      "grad_norm": 2.952134132385254,
      "learning_rate": 6.307277628032345e-05,
      "loss": 0.4814,
      "step": 1530
    },
    {
      "epoch": 2.0755480607082633,
      "grad_norm": 3.6748759746551514,
      "learning_rate": 6.217430368373764e-05,
      "loss": 0.4638,
      "step": 1540
    },
    {
      "epoch": 2.0890387858347386,
      "grad_norm": 1.927171230316162,
      "learning_rate": 6.127583108715184e-05,
      "loss": 0.4512,
      "step": 1550
    },
    {
      "epoch": 2.1025295109612143,
      "grad_norm": 2.6556429862976074,
      "learning_rate": 6.037735849056604e-05,
      "loss": 0.4752,
      "step": 1560
    },
    {
      "epoch": 2.1160202360876896,
      "grad_norm": 1.954484462738037,
      "learning_rate": 5.947888589398024e-05,
      "loss": 0.4347,
      "step": 1570
    },
    {
      "epoch": 2.1295109612141654,
      "grad_norm": 2.986450433731079,
      "learning_rate": 5.8580413297394435e-05,
      "loss": 0.4278,
      "step": 1580
    },
    {
      "epoch": 2.1430016863406407,
      "grad_norm": 2.7762913703918457,
      "learning_rate": 5.768194070080862e-05,
      "loss": 0.4496,
      "step": 1590
    },
    {
      "epoch": 2.1564924114671165,
      "grad_norm": 3.0169496536254883,
      "learning_rate": 5.6783468104222825e-05,
      "loss": 0.4359,
      "step": 1600
    },
    {
      "epoch": 2.1699831365935918,
      "grad_norm": 2.6418251991271973,
      "learning_rate": 5.5884995507637014e-05,
      "loss": 0.4512,
      "step": 1610
    },
    {
      "epoch": 2.1834738617200675,
      "grad_norm": 2.8345439434051514,
      "learning_rate": 5.4986522911051216e-05,
      "loss": 0.4339,
      "step": 1620
    },
    {
      "epoch": 2.196964586846543,
      "grad_norm": 2.8585574626922607,
      "learning_rate": 5.408805031446541e-05,
      "loss": 0.4072,
      "step": 1630
    },
    {
      "epoch": 2.2104553119730186,
      "grad_norm": 1.8751145601272583,
      "learning_rate": 5.31895777178796e-05,
      "loss": 0.4526,
      "step": 1640
    },
    {
      "epoch": 2.223946037099494,
      "grad_norm": 2.441622734069824,
      "learning_rate": 5.22911051212938e-05,
      "loss": 0.4758,
      "step": 1650
    },
    {
      "epoch": 2.2374367622259697,
      "grad_norm": 3.11625337600708,
      "learning_rate": 5.1392632524708e-05,
      "loss": 0.4705,
      "step": 1660
    },
    {
      "epoch": 2.2509274873524454,
      "grad_norm": 2.761462688446045,
      "learning_rate": 5.04941599281222e-05,
      "loss": 0.4439,
      "step": 1670
    },
    {
      "epoch": 2.2644182124789207,
      "grad_norm": 2.8460707664489746,
      "learning_rate": 4.959568733153639e-05,
      "loss": 0.4595,
      "step": 1680
    },
    {
      "epoch": 2.2779089376053965,
      "grad_norm": 2.73833966255188,
      "learning_rate": 4.869721473495058e-05,
      "loss": 0.436,
      "step": 1690
    },
    {
      "epoch": 2.291399662731872,
      "grad_norm": 3.1314399242401123,
      "learning_rate": 4.7798742138364785e-05,
      "loss": 0.4302,
      "step": 1700
    },
    {
      "epoch": 2.3048903878583475,
      "grad_norm": 2.203801155090332,
      "learning_rate": 4.690026954177898e-05,
      "loss": 0.4353,
      "step": 1710
    },
    {
      "epoch": 2.318381112984823,
      "grad_norm": 1.5654208660125732,
      "learning_rate": 4.6001796945193176e-05,
      "loss": 0.4436,
      "step": 1720
    },
    {
      "epoch": 2.3318718381112986,
      "grad_norm": 1.8968826532363892,
      "learning_rate": 4.5103324348607365e-05,
      "loss": 0.4617,
      "step": 1730
    },
    {
      "epoch": 2.345362563237774,
      "grad_norm": 3.4319088459014893,
      "learning_rate": 4.420485175202157e-05,
      "loss": 0.4951,
      "step": 1740
    },
    {
      "epoch": 2.3588532883642497,
      "grad_norm": 0.9367527365684509,
      "learning_rate": 4.330637915543576e-05,
      "loss": 0.4469,
      "step": 1750
    },
    {
      "epoch": 2.372344013490725,
      "grad_norm": 1.3041300773620605,
      "learning_rate": 4.240790655884996e-05,
      "loss": 0.4696,
      "step": 1760
    },
    {
      "epoch": 2.3858347386172007,
      "grad_norm": 1.0405113697052002,
      "learning_rate": 4.150943396226415e-05,
      "loss": 0.4121,
      "step": 1770
    },
    {
      "epoch": 2.399325463743676,
      "grad_norm": 1.1513006687164307,
      "learning_rate": 4.061096136567835e-05,
      "loss": 0.4134,
      "step": 1780
    },
    {
      "epoch": 2.412816188870152,
      "grad_norm": 1.3874084949493408,
      "learning_rate": 3.971248876909254e-05,
      "loss": 0.4375,
      "step": 1790
    },
    {
      "epoch": 2.426306913996627,
      "grad_norm": 1.29698646068573,
      "learning_rate": 3.881401617250674e-05,
      "loss": 0.4752,
      "step": 1800
    },
    {
      "epoch": 2.439797639123103,
      "grad_norm": 0.8246452808380127,
      "learning_rate": 3.7915543575920934e-05,
      "loss": 0.4738,
      "step": 1810
    },
    {
      "epoch": 2.4532883642495786,
      "grad_norm": 1.246367335319519,
      "learning_rate": 3.7017070979335136e-05,
      "loss": 0.4583,
      "step": 1820
    },
    {
      "epoch": 2.466779089376054,
      "grad_norm": 2.288325071334839,
      "learning_rate": 3.611859838274933e-05,
      "loss": 0.4643,
      "step": 1830
    },
    {
      "epoch": 2.4802698145025297,
      "grad_norm": 2.6386337280273438,
      "learning_rate": 3.522012578616352e-05,
      "loss": 0.4748,
      "step": 1840
    },
    {
      "epoch": 2.493760539629005,
      "grad_norm": 2.788741111755371,
      "learning_rate": 3.4321653189577715e-05,
      "loss": 0.4623,
      "step": 1850
    },
    {
      "epoch": 2.5072512647554808,
      "grad_norm": 3.7216169834136963,
      "learning_rate": 3.342318059299192e-05,
      "loss": 0.441,
      "step": 1860
    },
    {
      "epoch": 2.520741989881956,
      "grad_norm": 2.007139205932617,
      "learning_rate": 3.252470799640611e-05,
      "loss": 0.4383,
      "step": 1870
    },
    {
      "epoch": 2.534232715008432,
      "grad_norm": 0.6199157238006592,
      "learning_rate": 3.162623539982031e-05,
      "loss": 0.4197,
      "step": 1880
    },
    {
      "epoch": 2.547723440134907,
      "grad_norm": 0.895648181438446,
      "learning_rate": 3.0727762803234503e-05,
      "loss": 0.4403,
      "step": 1890
    },
    {
      "epoch": 2.561214165261383,
      "grad_norm": 0.6603314280509949,
      "learning_rate": 2.9829290206648695e-05,
      "loss": 0.4794,
      "step": 1900
    },
    {
      "epoch": 2.574704890387858,
      "grad_norm": 1.2536957263946533,
      "learning_rate": 2.8930817610062894e-05,
      "loss": 0.4778,
      "step": 1910
    },
    {
      "epoch": 2.588195615514334,
      "grad_norm": 0.6780521869659424,
      "learning_rate": 2.803234501347709e-05,
      "loss": 0.4672,
      "step": 1920
    },
    {
      "epoch": 2.6016863406408093,
      "grad_norm": 0.6663461327552795,
      "learning_rate": 2.7133872416891288e-05,
      "loss": 0.4212,
      "step": 1930
    },
    {
      "epoch": 2.615177065767285,
      "grad_norm": 1.3189208507537842,
      "learning_rate": 2.6235399820305483e-05,
      "loss": 0.4366,
      "step": 1940
    },
    {
      "epoch": 2.6286677908937603,
      "grad_norm": 0.6126547455787659,
      "learning_rate": 2.5336927223719675e-05,
      "loss": 0.4467,
      "step": 1950
    },
    {
      "epoch": 2.642158516020236,
      "grad_norm": 0.8297685980796814,
      "learning_rate": 2.4438454627133874e-05,
      "loss": 0.4203,
      "step": 1960
    },
    {
      "epoch": 2.655649241146712,
      "grad_norm": 0.81744784116745,
      "learning_rate": 2.353998203054807e-05,
      "loss": 0.4714,
      "step": 1970
    },
    {
      "epoch": 2.669139966273187,
      "grad_norm": 1.0496429204940796,
      "learning_rate": 2.2641509433962265e-05,
      "loss": 0.4517,
      "step": 1980
    },
    {
      "epoch": 2.6826306913996625,
      "grad_norm": 0.843941330909729,
      "learning_rate": 2.174303683737646e-05,
      "loss": 0.4361,
      "step": 1990
    },
    {
      "epoch": 2.6961214165261382,
      "grad_norm": 0.7860977649688721,
      "learning_rate": 2.0844564240790655e-05,
      "loss": 0.4061,
      "step": 2000
    },
    {
      "epoch": 2.709612141652614,
      "grad_norm": 1.3650184869766235,
      "learning_rate": 1.9946091644204854e-05,
      "loss": 0.4641,
      "step": 2010
    },
    {
      "epoch": 2.7231028667790893,
      "grad_norm": 0.623533308506012,
      "learning_rate": 1.9047619047619046e-05,
      "loss": 0.4296,
      "step": 2020
    },
    {
      "epoch": 2.736593591905565,
      "grad_norm": 1.0236366987228394,
      "learning_rate": 1.8149146451033245e-05,
      "loss": 0.449,
      "step": 2030
    },
    {
      "epoch": 2.7500843170320404,
      "grad_norm": 0.6305617690086365,
      "learning_rate": 1.725067385444744e-05,
      "loss": 0.4888,
      "step": 2040
    },
    {
      "epoch": 2.763575042158516,
      "grad_norm": 0.5004448294639587,
      "learning_rate": 1.6352201257861635e-05,
      "loss": 0.4227,
      "step": 2050
    },
    {
      "epoch": 2.7770657672849914,
      "grad_norm": 0.6496240496635437,
      "learning_rate": 1.545372866127583e-05,
      "loss": 0.4499,
      "step": 2060
    },
    {
      "epoch": 2.790556492411467,
      "grad_norm": 0.9616618752479553,
      "learning_rate": 1.455525606469003e-05,
      "loss": 0.4368,
      "step": 2070
    },
    {
      "epoch": 2.8040472175379425,
      "grad_norm": 0.5831645727157593,
      "learning_rate": 1.3656783468104223e-05,
      "loss": 0.4452,
      "step": 2080
    },
    {
      "epoch": 2.8175379426644183,
      "grad_norm": 0.8784683346748352,
      "learning_rate": 1.275831087151842e-05,
      "loss": 0.4698,
      "step": 2090
    },
    {
      "epoch": 2.8310286677908936,
      "grad_norm": 1.3440258502960205,
      "learning_rate": 1.1859838274932616e-05,
      "loss": 0.4195,
      "step": 2100
    },
    {
      "epoch": 2.8445193929173693,
      "grad_norm": 1.2359696626663208,
      "learning_rate": 1.0961365678346811e-05,
      "loss": 0.4305,
      "step": 2110
    },
    {
      "epoch": 2.858010118043845,
      "grad_norm": 1.1054702997207642,
      "learning_rate": 1.0062893081761008e-05,
      "loss": 0.4649,
      "step": 2120
    },
    {
      "epoch": 2.8715008431703204,
      "grad_norm": 1.379691481590271,
      "learning_rate": 9.164420485175203e-06,
      "loss": 0.4876,
      "step": 2130
    },
    {
      "epoch": 2.8849915682967957,
      "grad_norm": 1.6011934280395508,
      "learning_rate": 8.265947888589399e-06,
      "loss": 0.4759,
      "step": 2140
    },
    {
      "epoch": 2.8984822934232715,
      "grad_norm": 1.1230287551879883,
      "learning_rate": 7.367475292003594e-06,
      "loss": 0.4569,
      "step": 2150
    },
    {
      "epoch": 2.911973018549747,
      "grad_norm": 0.5642672181129456,
      "learning_rate": 6.46900269541779e-06,
      "loss": 0.4302,
      "step": 2160
    },
    {
      "epoch": 2.9254637436762225,
      "grad_norm": 0.5524741411209106,
      "learning_rate": 5.570530098831986e-06,
      "loss": 0.4062,
      "step": 2170
    },
    {
      "epoch": 2.9389544688026983,
      "grad_norm": 0.5504957437515259,
      "learning_rate": 4.6720575022461816e-06,
      "loss": 0.4336,
      "step": 2180
    },
    {
      "epoch": 2.9524451939291736,
      "grad_norm": 0.9138615131378174,
      "learning_rate": 3.7735849056603773e-06,
      "loss": 0.4463,
      "step": 2190
    },
    {
      "epoch": 2.9659359190556494,
      "grad_norm": 0.5804033875465393,
      "learning_rate": 2.8751123090745735e-06,
      "loss": 0.4323,
      "step": 2200
    },
    {
      "epoch": 2.9794266441821247,
      "grad_norm": 0.8893300890922546,
      "learning_rate": 1.9766397124887693e-06,
      "loss": 0.4244,
      "step": 2210
    },
    {
      "epoch": 2.9929173693086004,
      "grad_norm": 0.6132921576499939,
      "learning_rate": 1.078167115902965e-06,
      "loss": 0.4562,
      "step": 2220
    }
  ],
  "logging_steps": 10,
  "max_steps": 2226,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 1.107103129778258e+17,
  "train_batch_size": 2,
  "trial_name": null,
  "trial_params": null
}