{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 20.771513353115726,
  "eval_steps": 500,
  "global_step": 7000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.02967359050445104,
      "grad_norm": 1.9033336639404297,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 0.2317,
      "step": 10
    },
    {
      "epoch": 0.05934718100890208,
      "grad_norm": 1.0404284000396729,
      "learning_rate": 4.000000000000001e-06,
      "loss": 0.1977,
      "step": 20
    },
    {
      "epoch": 0.08902077151335312,
      "grad_norm": 0.6579734683036804,
      "learning_rate": 6e-06,
      "loss": 0.1451,
      "step": 30
    },
    {
      "epoch": 0.11869436201780416,
      "grad_norm": 0.33155006170272827,
      "learning_rate": 8.000000000000001e-06,
      "loss": 0.0959,
      "step": 40
    },
    {
      "epoch": 0.14836795252225518,
      "grad_norm": 0.5317391753196716,
      "learning_rate": 1e-05,
      "loss": 0.0828,
      "step": 50
    },
    {
      "epoch": 0.17804154302670624,
      "grad_norm": 0.45179909467697144,
      "learning_rate": 1.2e-05,
      "loss": 0.0814,
      "step": 60
    },
    {
      "epoch": 0.20771513353115728,
      "grad_norm": 0.2707938849925995,
      "learning_rate": 1.4000000000000001e-05,
      "loss": 0.0562,
      "step": 70
    },
    {
      "epoch": 0.23738872403560832,
      "grad_norm": 0.22402559220790863,
      "learning_rate": 1.6000000000000003e-05,
      "loss": 0.0594,
      "step": 80
    },
    {
      "epoch": 0.26706231454005935,
      "grad_norm": 0.16533811390399933,
      "learning_rate": 1.8e-05,
      "loss": 0.0529,
      "step": 90
    },
    {
      "epoch": 0.29673590504451036,
      "grad_norm": 0.222530797123909,
      "learning_rate": 2e-05,
      "loss": 0.0522,
      "step": 100
    },
    {
      "epoch": 0.3264094955489614,
      "grad_norm": 0.1894129067659378,
      "learning_rate": 2.2000000000000003e-05,
      "loss": 0.0514,
      "step": 110
    },
    {
      "epoch": 0.3560830860534125,
      "grad_norm": 0.20559543371200562,
      "learning_rate": 2.4e-05,
      "loss": 0.0462,
      "step": 120
    },
    {
      "epoch": 0.3857566765578635,
      "grad_norm": 0.157830610871315,
      "learning_rate": 2.6000000000000002e-05,
      "loss": 0.0471,
      "step": 130
    },
    {
      "epoch": 0.41543026706231456,
      "grad_norm": 0.14663924276828766,
      "learning_rate": 2.8000000000000003e-05,
      "loss": 0.0449,
      "step": 140
    },
    {
      "epoch": 0.44510385756676557,
      "grad_norm": 0.14772620797157288,
      "learning_rate": 3e-05,
      "loss": 0.0424,
      "step": 150
    },
    {
      "epoch": 0.47477744807121663,
      "grad_norm": 0.16058433055877686,
      "learning_rate": 3.2000000000000005e-05,
      "loss": 0.0424,
      "step": 160
    },
    {
      "epoch": 0.5044510385756676,
      "grad_norm": 0.15857172012329102,
      "learning_rate": 3.4000000000000007e-05,
      "loss": 0.041,
      "step": 170
    },
    {
      "epoch": 0.5341246290801187,
      "grad_norm": 0.17435680329799652,
      "learning_rate": 3.6e-05,
      "loss": 0.0408,
      "step": 180
    },
    {
      "epoch": 0.5637982195845698,
      "grad_norm": 0.1439993977546692,
      "learning_rate": 3.8e-05,
      "loss": 0.0352,
      "step": 190
    },
    {
      "epoch": 0.5934718100890207,
      "grad_norm": 0.15629075467586517,
      "learning_rate": 4e-05,
      "loss": 0.0383,
      "step": 200
    },
    {
      "epoch": 0.6231454005934718,
      "grad_norm": 0.1610369235277176,
      "learning_rate": 4.2e-05,
      "loss": 0.0392,
      "step": 210
    },
    {
      "epoch": 0.6528189910979229,
      "grad_norm": 0.17589861154556274,
      "learning_rate": 4.4000000000000006e-05,
      "loss": 0.0374,
      "step": 220
    },
    {
      "epoch": 0.6824925816023739,
      "grad_norm": 0.19186066091060638,
      "learning_rate": 4.600000000000001e-05,
      "loss": 0.0358,
      "step": 230
    },
    {
      "epoch": 0.712166172106825,
      "grad_norm": 0.1579175740480423,
      "learning_rate": 4.8e-05,
      "loss": 0.0357,
      "step": 240
    },
    {
      "epoch": 0.7418397626112759,
      "grad_norm": 0.17220136523246765,
      "learning_rate": 5e-05,
      "loss": 0.0334,
      "step": 250
    },
    {
      "epoch": 0.771513353115727,
      "grad_norm": 0.18591266870498657,
      "learning_rate": 5.2000000000000004e-05,
      "loss": 0.0315,
      "step": 260
    },
    {
      "epoch": 0.8011869436201781,
      "grad_norm": 0.2341579794883728,
      "learning_rate": 5.4000000000000005e-05,
      "loss": 0.0375,
      "step": 270
    },
    {
      "epoch": 0.8308605341246291,
      "grad_norm": 0.15227168798446655,
      "learning_rate": 5.6000000000000006e-05,
      "loss": 0.031,
      "step": 280
    },
    {
      "epoch": 0.8605341246290801,
      "grad_norm": 0.1876339167356491,
      "learning_rate": 5.8e-05,
      "loss": 0.0371,
      "step": 290
    },
    {
      "epoch": 0.8902077151335311,
      "grad_norm": 0.1789393573999405,
      "learning_rate": 6e-05,
      "loss": 0.0313,
      "step": 300
    },
    {
      "epoch": 0.9198813056379822,
      "grad_norm": 0.1678636074066162,
      "learning_rate": 6.2e-05,
      "loss": 0.0349,
      "step": 310
    },
    {
      "epoch": 0.9495548961424333,
      "grad_norm": 0.17457032203674316,
      "learning_rate": 6.400000000000001e-05,
      "loss": 0.0296,
      "step": 320
    },
    {
      "epoch": 0.9792284866468842,
      "grad_norm": 0.14290577173233032,
      "learning_rate": 6.6e-05,
      "loss": 0.0308,
      "step": 330
    },
    {
      "epoch": 1.0089020771513353,
      "grad_norm": 0.23601128160953522,
      "learning_rate": 6.800000000000001e-05,
      "loss": 0.0298,
      "step": 340
    },
    {
      "epoch": 1.0385756676557865,
      "grad_norm": 0.14039042592048645,
      "learning_rate": 7e-05,
      "loss": 0.0262,
      "step": 350
    },
    {
      "epoch": 1.0682492581602374,
      "grad_norm": 0.1804966777563095,
      "learning_rate": 7.2e-05,
      "loss": 0.0284,
      "step": 360
    },
    {
      "epoch": 1.0979228486646884,
      "grad_norm": 0.22986947000026703,
      "learning_rate": 7.4e-05,
      "loss": 0.0308,
      "step": 370
    },
    {
      "epoch": 1.1275964391691395,
      "grad_norm": 0.20188020169734955,
      "learning_rate": 7.6e-05,
      "loss": 0.0261,
      "step": 380
    },
    {
      "epoch": 1.1572700296735905,
      "grad_norm": 0.14067409932613373,
      "learning_rate": 7.800000000000001e-05,
      "loss": 0.028,
      "step": 390
    },
    {
      "epoch": 1.1869436201780414,
      "grad_norm": 0.16516339778900146,
      "learning_rate": 8e-05,
      "loss": 0.0247,
      "step": 400
    },
    {
      "epoch": 1.2166172106824926,
      "grad_norm": 0.19918474555015564,
      "learning_rate": 8.2e-05,
      "loss": 0.0301,
      "step": 410
    },
    {
      "epoch": 1.2462908011869436,
      "grad_norm": 0.1878385990858078,
      "learning_rate": 8.4e-05,
      "loss": 0.0251,
      "step": 420
    },
    {
      "epoch": 1.2759643916913945,
      "grad_norm": 0.20107118785381317,
      "learning_rate": 8.6e-05,
      "loss": 0.0279,
      "step": 430
    },
    {
      "epoch": 1.3056379821958457,
      "grad_norm": 0.24616649746894836,
      "learning_rate": 8.800000000000001e-05,
      "loss": 0.0259,
      "step": 440
    },
    {
      "epoch": 1.3353115727002967,
      "grad_norm": 0.19029636681079865,
      "learning_rate": 9e-05,
      "loss": 0.0262,
      "step": 450
    },
    {
      "epoch": 1.3649851632047478,
      "grad_norm": 0.194508358836174,
      "learning_rate": 9.200000000000001e-05,
      "loss": 0.0275,
      "step": 460
    },
    {
      "epoch": 1.3946587537091988,
      "grad_norm": 0.20826251804828644,
      "learning_rate": 9.4e-05,
      "loss": 0.0289,
      "step": 470
    },
    {
      "epoch": 1.4243323442136497,
      "grad_norm": 0.13222843408584595,
      "learning_rate": 9.6e-05,
      "loss": 0.0249,
      "step": 480
    },
    {
      "epoch": 1.454005934718101,
      "grad_norm": 0.13967235386371613,
      "learning_rate": 9.8e-05,
      "loss": 0.0231,
      "step": 490
    },
    {
      "epoch": 1.4836795252225519,
      "grad_norm": 0.21556402742862701,
      "learning_rate": 0.0001,
      "loss": 0.0232,
      "step": 500
    },
    {
      "epoch": 1.513353115727003,
      "grad_norm": 0.2407234013080597,
      "learning_rate": 9.999972660400536e-05,
      "loss": 0.025,
      "step": 510
    },
    {
      "epoch": 1.543026706231454,
      "grad_norm": 0.1544090360403061,
      "learning_rate": 9.999890641901125e-05,
      "loss": 0.0224,
      "step": 520
    },
    {
      "epoch": 1.572700296735905,
      "grad_norm": 0.1930345594882965,
      "learning_rate": 9.999753945398704e-05,
      "loss": 0.0244,
      "step": 530
    },
    {
      "epoch": 1.6023738872403561,
      "grad_norm": 0.2288358211517334,
      "learning_rate": 9.99956257238817e-05,
      "loss": 0.0223,
      "step": 540
    },
    {
      "epoch": 1.632047477744807,
      "grad_norm": 0.2028588354587555,
      "learning_rate": 9.999316524962345e-05,
      "loss": 0.022,
      "step": 550
    },
    {
      "epoch": 1.6617210682492582,
      "grad_norm": 0.17989283800125122,
      "learning_rate": 9.999015805811965e-05,
      "loss": 0.0201,
      "step": 560
    },
    {
      "epoch": 1.6913946587537092,
      "grad_norm": 0.20576386153697968,
      "learning_rate": 9.998660418225645e-05,
      "loss": 0.0211,
      "step": 570
    },
    {
      "epoch": 1.7210682492581602,
      "grad_norm": 0.1263924539089203,
      "learning_rate": 9.998250366089848e-05,
      "loss": 0.0205,
      "step": 580
    },
    {
      "epoch": 1.7507418397626113,
      "grad_norm": 0.23239193856716156,
      "learning_rate": 9.997785653888835e-05,
      "loss": 0.0223,
      "step": 590
    },
    {
      "epoch": 1.7804154302670623,
      "grad_norm": 0.15964201092720032,
      "learning_rate": 9.997266286704631e-05,
      "loss": 0.0236,
      "step": 600
    },
    {
      "epoch": 1.8100890207715135,
      "grad_norm": 0.1998339742422104,
      "learning_rate": 9.996692270216947e-05,
      "loss": 0.0254,
      "step": 610
    },
    {
      "epoch": 1.8397626112759644,
      "grad_norm": 0.25945162773132324,
      "learning_rate": 9.996063610703137e-05,
      "loss": 0.0207,
      "step": 620
    },
    {
      "epoch": 1.8694362017804154,
      "grad_norm": 0.1687825620174408,
      "learning_rate": 9.995380315038119e-05,
      "loss": 0.0169,
      "step": 630
    },
    {
      "epoch": 1.8991097922848663,
      "grad_norm": 0.13454881310462952,
      "learning_rate": 9.994642390694308e-05,
      "loss": 0.0185,
      "step": 640
    },
    {
      "epoch": 1.9287833827893175,
      "grad_norm": 0.1880808174610138,
      "learning_rate": 9.993849845741524e-05,
      "loss": 0.0199,
      "step": 650
    },
    {
      "epoch": 1.9584569732937687,
      "grad_norm": 0.17725640535354614,
      "learning_rate": 9.993002688846913e-05,
      "loss": 0.019,
      "step": 660
    },
    {
      "epoch": 1.9881305637982196,
      "grad_norm": 0.17680853605270386,
      "learning_rate": 9.992100929274846e-05,
      "loss": 0.0257,
      "step": 670
    },
    {
      "epoch": 2.0178041543026706,
      "grad_norm": 0.18133607506752014,
      "learning_rate": 9.991144576886823e-05,
      "loss": 0.0222,
      "step": 680
    },
    {
      "epoch": 2.0474777448071215,
      "grad_norm": 0.21174193918704987,
      "learning_rate": 9.990133642141359e-05,
      "loss": 0.0192,
      "step": 690
    },
    {
      "epoch": 2.077151335311573,
      "grad_norm": 0.15766288340091705,
      "learning_rate": 9.989068136093873e-05,
      "loss": 0.0199,
      "step": 700
    },
    {
      "epoch": 2.106824925816024,
      "grad_norm": 0.19250448048114777,
      "learning_rate": 9.987948070396571e-05,
      "loss": 0.0252,
      "step": 710
    },
    {
      "epoch": 2.136498516320475,
      "grad_norm": 0.1412709653377533,
      "learning_rate": 9.986773457298311e-05,
      "loss": 0.0186,
      "step": 720
    },
    {
      "epoch": 2.166172106824926,
      "grad_norm": 0.2136259824037552,
      "learning_rate": 9.985544309644475e-05,
      "loss": 0.0204,
      "step": 730
    },
    {
      "epoch": 2.1958456973293767,
      "grad_norm": 0.10815251618623734,
      "learning_rate": 9.984260640876821e-05,
      "loss": 0.0153,
      "step": 740
    },
    {
      "epoch": 2.2255192878338277,
      "grad_norm": 0.14663298428058624,
      "learning_rate": 9.98292246503335e-05,
      "loss": 0.0189,
      "step": 750
    },
    {
      "epoch": 2.255192878338279,
      "grad_norm": 0.18529076874256134,
      "learning_rate": 9.981529796748134e-05,
      "loss": 0.0155,
      "step": 760
    },
    {
      "epoch": 2.28486646884273,
      "grad_norm": 0.2349974811077118,
      "learning_rate": 9.980082651251175e-05,
      "loss": 0.0172,
      "step": 770
    },
    {
      "epoch": 2.314540059347181,
      "grad_norm": 0.10778886079788208,
      "learning_rate": 9.97858104436822e-05,
      "loss": 0.0161,
      "step": 780
    },
    {
      "epoch": 2.344213649851632,
      "grad_norm": 0.15675969421863556,
      "learning_rate": 9.977024992520602e-05,
      "loss": 0.0165,
      "step": 790
    },
    {
      "epoch": 2.373887240356083,
      "grad_norm": 0.23468513786792755,
      "learning_rate": 9.975414512725057e-05,
      "loss": 0.0196,
      "step": 800
    },
    {
      "epoch": 2.4035608308605343,
      "grad_norm": 0.1332869678735733,
      "learning_rate": 9.973749622593534e-05,
      "loss": 0.0193,
      "step": 810
    },
    {
      "epoch": 2.4332344213649852,
      "grad_norm": 0.1406887024641037,
      "learning_rate": 9.972030340333001e-05,
      "loss": 0.0186,
      "step": 820
    },
    {
      "epoch": 2.462908011869436,
      "grad_norm": 0.11544730514287949,
      "learning_rate": 9.970256684745258e-05,
      "loss": 0.0195,
      "step": 830
    },
    {
      "epoch": 2.492581602373887,
      "grad_norm": 0.19476240873336792,
      "learning_rate": 9.968428675226714e-05,
      "loss": 0.0171,
      "step": 840
    },
    {
      "epoch": 2.5222551928783385,
      "grad_norm": 0.22309833765029907,
      "learning_rate": 9.966546331768191e-05,
      "loss": 0.0223,
      "step": 850
    },
    {
      "epoch": 2.551928783382789,
      "grad_norm": 0.2214643657207489,
      "learning_rate": 9.964609674954696e-05,
      "loss": 0.019,
      "step": 860
    },
    {
      "epoch": 2.5816023738872405,
      "grad_norm": 0.2298765480518341,
      "learning_rate": 9.962618725965196e-05,
      "loss": 0.0178,
      "step": 870
    },
    {
      "epoch": 2.6112759643916914,
      "grad_norm": 0.1542595475912094,
      "learning_rate": 9.96057350657239e-05,
      "loss": 0.0152,
      "step": 880
    },
    {
      "epoch": 2.6409495548961424,
      "grad_norm": 0.12994691729545593,
      "learning_rate": 9.95847403914247e-05,
      "loss": 0.0153,
      "step": 890
    },
    {
      "epoch": 2.6706231454005933,
      "grad_norm": 0.1726643294095993,
      "learning_rate": 9.956320346634876e-05,
      "loss": 0.0219,
      "step": 900
    },
    {
      "epoch": 2.7002967359050443,
      "grad_norm": 0.18292242288589478,
      "learning_rate": 9.954112452602045e-05,
      "loss": 0.0137,
      "step": 910
    },
    {
      "epoch": 2.7299703264094957,
      "grad_norm": 0.19749417901039124,
      "learning_rate": 9.95185038118915e-05,
      "loss": 0.0179,
      "step": 920
    },
    {
      "epoch": 2.7596439169139466,
      "grad_norm": 0.17808304727077484,
      "learning_rate": 9.949534157133844e-05,
      "loss": 0.0155,
      "step": 930
    },
    {
      "epoch": 2.7893175074183976,
      "grad_norm": 0.13589969277381897,
      "learning_rate": 9.94716380576598e-05,
      "loss": 0.0143,
      "step": 940
    },
    {
      "epoch": 2.8189910979228485,
      "grad_norm": 0.17047159373760223,
      "learning_rate": 9.944739353007344e-05,
      "loss": 0.0211,
      "step": 950
    },
    {
      "epoch": 2.8486646884272995,
      "grad_norm": 0.15535619854927063,
      "learning_rate": 9.942260825371358e-05,
      "loss": 0.0132,
      "step": 960
    },
    {
      "epoch": 2.878338278931751,
      "grad_norm": 0.1609240472316742,
      "learning_rate": 9.939728249962807e-05,
      "loss": 0.0145,
      "step": 970
    },
    {
      "epoch": 2.908011869436202,
      "grad_norm": 0.20487530529499054,
      "learning_rate": 9.937141654477528e-05,
      "loss": 0.0172,
      "step": 980
    },
    {
      "epoch": 2.9376854599406528,
      "grad_norm": 0.12371553480625153,
      "learning_rate": 9.934501067202117e-05,
      "loss": 0.0191,
      "step": 990
    },
    {
      "epoch": 2.9673590504451037,
      "grad_norm": 0.15513016283512115,
      "learning_rate": 9.931806517013612e-05,
      "loss": 0.0158,
      "step": 1000
    },
    {
      "epoch": 2.9970326409495547,
      "grad_norm": 0.1723584234714508,
      "learning_rate": 9.929058033379181e-05,
      "loss": 0.0139,
      "step": 1010
    },
    {
      "epoch": 3.026706231454006,
      "grad_norm": 0.1482209414243698,
      "learning_rate": 9.926255646355804e-05,
      "loss": 0.0148,
      "step": 1020
    },
    {
      "epoch": 3.056379821958457,
      "grad_norm": 0.15559454262256622,
      "learning_rate": 9.923399386589933e-05,
      "loss": 0.0147,
      "step": 1030
    },
    {
      "epoch": 3.086053412462908,
      "grad_norm": 0.22350917756557465,
      "learning_rate": 9.92048928531717e-05,
      "loss": 0.0157,
      "step": 1040
    },
    {
      "epoch": 3.115727002967359,
      "grad_norm": 0.1581738144159317,
      "learning_rate": 9.917525374361912e-05,
      "loss": 0.0133,
      "step": 1050
    },
    {
      "epoch": 3.14540059347181,
      "grad_norm": 0.20087914168834686,
      "learning_rate": 9.914507686137019e-05,
      "loss": 0.0208,
      "step": 1060
    },
    {
      "epoch": 3.1750741839762613,
      "grad_norm": 0.16003265976905823,
      "learning_rate": 9.911436253643445e-05,
      "loss": 0.0168,
      "step": 1070
    },
    {
      "epoch": 3.2047477744807122,
      "grad_norm": 0.15235169231891632,
      "learning_rate": 9.90831111046988e-05,
      "loss": 0.0168,
      "step": 1080
    },
    {
      "epoch": 3.234421364985163,
      "grad_norm": 0.15660665929317474,
      "learning_rate": 9.905132290792394e-05,
      "loss": 0.0106,
      "step": 1090
    },
    {
      "epoch": 3.264094955489614,
      "grad_norm": 0.16853424906730652,
      "learning_rate": 9.901899829374047e-05,
      "loss": 0.0149,
      "step": 1100
    },
    {
      "epoch": 3.293768545994065,
      "grad_norm": 0.1335846185684204,
      "learning_rate": 9.89861376156452e-05,
      "loss": 0.0168,
      "step": 1110
    },
    {
      "epoch": 3.3234421364985165,
      "grad_norm": 0.20238997042179108,
      "learning_rate": 9.895274123299723e-05,
      "loss": 0.0154,
      "step": 1120
    },
    {
      "epoch": 3.3531157270029674,
      "grad_norm": 0.22216491401195526,
      "learning_rate": 9.891880951101407e-05,
      "loss": 0.019,
      "step": 1130
    },
    {
      "epoch": 3.3827893175074184,
      "grad_norm": 0.2017626017332077,
      "learning_rate": 9.888434282076758e-05,
      "loss": 0.017,
      "step": 1140
    },
    {
      "epoch": 3.4124629080118694,
      "grad_norm": 0.18049117922782898,
      "learning_rate": 9.884934153917997e-05,
      "loss": 0.0163,
      "step": 1150
    },
    {
      "epoch": 3.4421364985163203,
      "grad_norm": 0.28145721554756165,
      "learning_rate": 9.881380604901964e-05,
      "loss": 0.0166,
      "step": 1160
    },
    {
      "epoch": 3.4718100890207717,
      "grad_norm": 0.2356300801038742,
      "learning_rate": 9.877773673889701e-05,
      "loss": 0.0155,
      "step": 1170
    },
    {
      "epoch": 3.5014836795252227,
      "grad_norm": 0.24113395810127258,
      "learning_rate": 9.87411340032603e-05,
      "loss": 0.0156,
      "step": 1180
    },
    {
      "epoch": 3.5311572700296736,
      "grad_norm": 0.18665863573551178,
      "learning_rate": 9.870399824239117e-05,
      "loss": 0.0159,
      "step": 1190
    },
    {
      "epoch": 3.5608308605341246,
      "grad_norm": 0.16171567142009735,
      "learning_rate": 9.86663298624003e-05,
      "loss": 0.0167,
      "step": 1200
    },
    {
      "epoch": 3.5905044510385755,
      "grad_norm": 0.17315839231014252,
      "learning_rate": 9.862812927522309e-05,
      "loss": 0.017,
      "step": 1210
    },
    {
      "epoch": 3.620178041543027,
      "grad_norm": 0.14727933704853058,
      "learning_rate": 9.858939689861506e-05,
      "loss": 0.0132,
      "step": 1220
    },
    {
      "epoch": 3.649851632047478,
      "grad_norm": 0.1552547812461853,
      "learning_rate": 9.855013315614725e-05,
      "loss": 0.0175,
      "step": 1230
    },
    {
      "epoch": 3.679525222551929,
      "grad_norm": 0.1715100109577179,
      "learning_rate": 9.851033847720166e-05,
      "loss": 0.0139,
      "step": 1240
    },
    {
      "epoch": 3.7091988130563798,
      "grad_norm": 0.16414763033390045,
      "learning_rate": 9.847001329696653e-05,
      "loss": 0.0189,
      "step": 1250
    },
    {
      "epoch": 3.7388724035608307,
      "grad_norm": 0.1251063048839569,
      "learning_rate": 9.842915805643155e-05,
      "loss": 0.0145,
      "step": 1260
    },
    {
      "epoch": 3.768545994065282,
      "grad_norm": 0.17011059820652008,
      "learning_rate": 9.838777320238312e-05,
      "loss": 0.0161,
      "step": 1270
    },
    {
      "epoch": 3.798219584569733,
      "grad_norm": 0.14429537951946259,
      "learning_rate": 9.834585918739936e-05,
      "loss": 0.0159,
      "step": 1280
    },
    {
      "epoch": 3.827893175074184,
      "grad_norm": 0.138567715883255,
      "learning_rate": 9.830341646984521e-05,
      "loss": 0.0175,
      "step": 1290
    },
    {
      "epoch": 3.857566765578635,
      "grad_norm": 0.08295896649360657,
      "learning_rate": 9.826044551386744e-05,
      "loss": 0.0145,
      "step": 1300
    },
    {
      "epoch": 3.887240356083086,
      "grad_norm": 0.0911448523402214,
      "learning_rate": 9.821694678938953e-05,
      "loss": 0.0134,
      "step": 1310
    },
    {
      "epoch": 3.9169139465875373,
      "grad_norm": 0.14157798886299133,
      "learning_rate": 9.817292077210659e-05,
      "loss": 0.0158,
      "step": 1320
    },
    {
      "epoch": 3.9465875370919883,
      "grad_norm": 0.17415288090705872,
      "learning_rate": 9.812836794348004e-05,
      "loss": 0.0125,
      "step": 1330
    },
    {
      "epoch": 3.9762611275964392,
      "grad_norm": 0.22007249295711517,
      "learning_rate": 9.808328879073251e-05,
      "loss": 0.0153,
      "step": 1340
    },
    {
      "epoch": 4.005934718100891,
      "grad_norm": 0.144961416721344,
      "learning_rate": 9.803768380684242e-05,
      "loss": 0.0117,
      "step": 1350
    },
    {
      "epoch": 4.035608308605341,
      "grad_norm": 0.14304885268211365,
      "learning_rate": 9.799155349053851e-05,
      "loss": 0.0138,
      "step": 1360
    },
    {
      "epoch": 4.0652818991097925,
      "grad_norm": 0.18843571841716766,
      "learning_rate": 9.794489834629455e-05,
      "loss": 0.0106,
      "step": 1370
    },
    {
      "epoch": 4.094955489614243,
      "grad_norm": 0.16858817636966705,
      "learning_rate": 9.789771888432375e-05,
      "loss": 0.0138,
      "step": 1380
    },
    {
      "epoch": 4.1246290801186944,
      "grad_norm": 0.19177594780921936,
      "learning_rate": 9.785001562057309e-05,
      "loss": 0.0114,
      "step": 1390
    },
    {
      "epoch": 4.154302670623146,
      "grad_norm": 0.20158767700195312,
      "learning_rate": 9.780178907671789e-05,
      "loss": 0.0146,
      "step": 1400
    },
    {
      "epoch": 4.183976261127596,
      "grad_norm": 0.17675232887268066,
      "learning_rate": 9.775303978015585e-05,
      "loss": 0.0116,
      "step": 1410
    },
    {
      "epoch": 4.213649851632048,
      "grad_norm": 0.20077385008335114,
      "learning_rate": 9.77037682640015e-05,
      "loss": 0.0172,
      "step": 1420
    },
    {
      "epoch": 4.243323442136498,
      "grad_norm": 0.17185665667057037,
      "learning_rate": 9.765397506708023e-05,
      "loss": 0.0138,
      "step": 1430
    },
    {
      "epoch": 4.27299703264095,
      "grad_norm": 0.1641971468925476,
      "learning_rate": 9.760366073392246e-05,
      "loss": 0.0145,
      "step": 1440
    },
    {
      "epoch": 4.302670623145401,
      "grad_norm": 0.13757102191448212,
      "learning_rate": 9.755282581475769e-05,
      "loss": 0.0158,
      "step": 1450
    },
    {
      "epoch": 4.332344213649852,
      "grad_norm": 0.18012432754039764,
      "learning_rate": 9.750147086550844e-05,
      "loss": 0.0139,
      "step": 1460
    },
    {
      "epoch": 4.362017804154303,
      "grad_norm": 0.1639927178621292,
      "learning_rate": 9.744959644778422e-05,
      "loss": 0.0142,
      "step": 1470
    },
    {
      "epoch": 4.3916913946587535,
      "grad_norm": 0.17392724752426147,
      "learning_rate": 9.739720312887535e-05,
      "loss": 0.018,
      "step": 1480
    },
    {
      "epoch": 4.421364985163205,
      "grad_norm": 0.15744361281394958,
      "learning_rate": 9.734429148174675e-05,
      "loss": 0.0128,
      "step": 1490
    },
    {
      "epoch": 4.451038575667655,
      "grad_norm": 0.12954673171043396,
      "learning_rate": 9.729086208503174e-05,
      "loss": 0.0188,
      "step": 1500
    },
    {
      "epoch": 4.480712166172107,
      "grad_norm": 0.16809141635894775,
      "learning_rate": 9.723691552302562e-05,
      "loss": 0.0144,
      "step": 1510
    },
    {
      "epoch": 4.510385756676558,
      "grad_norm": 0.1523902416229248,
      "learning_rate": 9.718245238567939e-05,
      "loss": 0.0145,
      "step": 1520
    },
    {
      "epoch": 4.540059347181009,
      "grad_norm": 0.26666077971458435,
      "learning_rate": 9.712747326859315e-05,
      "loss": 0.015,
      "step": 1530
    },
    {
      "epoch": 4.56973293768546,
      "grad_norm": 0.193909153342247,
      "learning_rate": 9.707197877300974e-05,
      "loss": 0.0167,
      "step": 1540
    },
    {
      "epoch": 4.5994065281899115,
      "grad_norm": 0.17812030017375946,
      "learning_rate": 9.701596950580806e-05,
      "loss": 0.0157,
      "step": 1550
    },
    {
      "epoch": 4.629080118694362,
      "grad_norm": 0.26170411705970764,
      "learning_rate": 9.695944607949649e-05,
      "loss": 0.0139,
      "step": 1560
    },
    {
      "epoch": 4.658753709198813,
      "grad_norm": 0.14579689502716064,
      "learning_rate": 9.690240911220618e-05,
      "loss": 0.013,
      "step": 1570
    },
    {
      "epoch": 4.688427299703264,
      "grad_norm": 0.17479683458805084,
      "learning_rate": 9.684485922768422e-05,
      "loss": 0.014,
      "step": 1580
    },
    {
      "epoch": 4.718100890207715,
      "grad_norm": 0.15623094141483307,
      "learning_rate": 9.6786797055287e-05,
      "loss": 0.0151,
      "step": 1590
    },
    {
      "epoch": 4.747774480712166,
      "grad_norm": 0.1983657032251358,
      "learning_rate": 9.672822322997305e-05,
      "loss": 0.0138,
      "step": 1600
    },
    {
      "epoch": 4.777448071216617,
      "grad_norm": 0.13670101761817932,
      "learning_rate": 9.66691383922964e-05,
      "loss": 0.0114,
      "step": 1610
    },
    {
      "epoch": 4.807121661721069,
      "grad_norm": 0.13849210739135742,
      "learning_rate": 9.660954318839933e-05,
      "loss": 0.0105,
      "step": 1620
    },
    {
      "epoch": 4.836795252225519,
      "grad_norm": 0.13254909217357635,
      "learning_rate": 9.654943827000548e-05,
      "loss": 0.0113,
      "step": 1630
    },
    {
      "epoch": 4.8664688427299705,
      "grad_norm": 0.12954489886760712,
      "learning_rate": 9.648882429441257e-05,
      "loss": 0.0133,
      "step": 1640
    },
    {
      "epoch": 4.896142433234421,
      "grad_norm": 0.13290920853614807,
      "learning_rate": 9.642770192448536e-05,
      "loss": 0.0156,
      "step": 1650
    },
    {
      "epoch": 4.925816023738872,
      "grad_norm": 0.13683238625526428,
      "learning_rate": 9.636607182864827e-05,
      "loss": 0.0096,
      "step": 1660
    },
    {
      "epoch": 4.955489614243324,
      "grad_norm": 0.07962514460086823,
      "learning_rate": 9.630393468087818e-05,
      "loss": 0.0128,
      "step": 1670
    },
    {
      "epoch": 4.985163204747774,
      "grad_norm": 0.15497808158397675,
      "learning_rate": 9.624129116069694e-05,
      "loss": 0.014,
      "step": 1680
    },
    {
      "epoch": 5.014836795252226,
      "grad_norm": 0.1419367492198944,
      "learning_rate": 9.617814195316411e-05,
      "loss": 0.0132,
      "step": 1690
    },
    {
      "epoch": 5.044510385756676,
      "grad_norm": 0.2201174795627594,
      "learning_rate": 9.611448774886924e-05,
      "loss": 0.0133,
      "step": 1700
    },
    {
      "epoch": 5.074183976261128,
      "grad_norm": 0.16477946937084198,
      "learning_rate": 9.605032924392457e-05,
      "loss": 0.0131,
      "step": 1710
    },
    {
      "epoch": 5.103857566765579,
      "grad_norm": 0.19834354519844055,
      "learning_rate": 9.598566713995718e-05,
      "loss": 0.0154,
      "step": 1720
    },
    {
      "epoch": 5.1335311572700295,
      "grad_norm": 0.22880475223064423,
      "learning_rate": 9.59205021441015e-05,
      "loss": 0.014,
      "step": 1730
    },
    {
      "epoch": 5.163204747774481,
      "grad_norm": 0.16253937780857086,
      "learning_rate": 9.58548349689915e-05,
      "loss": 0.0122,
      "step": 1740
    },
    {
      "epoch": 5.192878338278931,
      "grad_norm": 0.22121521830558777,
      "learning_rate": 9.578866633275288e-05,
      "loss": 0.015,
      "step": 1750
    },
    {
      "epoch": 5.222551928783383,
      "grad_norm": 0.182882621884346,
      "learning_rate": 9.572199695899522e-05,
      "loss": 0.0163,
      "step": 1760
    },
    {
      "epoch": 5.252225519287834,
      "grad_norm": 0.17466451227664948,
      "learning_rate": 9.565482757680415e-05,
      "loss": 0.0145,
      "step": 1770
    },
    {
      "epoch": 5.281899109792285,
      "grad_norm": 0.12321746349334717,
      "learning_rate": 9.558715892073323e-05,
      "loss": 0.0127,
      "step": 1780
    },
    {
      "epoch": 5.311572700296736,
      "grad_norm": 0.1869288980960846,
      "learning_rate": 9.551899173079607e-05,
      "loss": 0.0185,
      "step": 1790
    },
    {
      "epoch": 5.341246290801187,
      "grad_norm": 0.1715238392353058,
      "learning_rate": 9.545032675245813e-05,
      "loss": 0.0136,
      "step": 1800
    },
    {
      "epoch": 5.370919881305638,
      "grad_norm": 0.20112700760364532,
      "learning_rate": 9.538116473662861e-05,
      "loss": 0.013,
      "step": 1810
    },
    {
      "epoch": 5.400593471810089,
      "grad_norm": 0.13722355663776398,
      "learning_rate": 9.531150643965223e-05,
      "loss": 0.0112,
      "step": 1820
    },
    {
      "epoch": 5.43026706231454,
      "grad_norm": 0.2131308764219284,
      "learning_rate": 9.524135262330098e-05,
      "loss": 0.012,
      "step": 1830
    },
    {
      "epoch": 5.459940652818991,
      "grad_norm": 0.13212528824806213,
      "learning_rate": 9.517070405476575e-05,
      "loss": 0.0112,
      "step": 1840
    },
    {
      "epoch": 5.489614243323442,
      "grad_norm": 0.1769159734249115,
      "learning_rate": 9.509956150664796e-05,
      "loss": 0.0122,
      "step": 1850
    },
    {
      "epoch": 5.519287833827893,
      "grad_norm": 0.15795955061912537,
      "learning_rate": 9.502792575695112e-05,
      "loss": 0.0152,
      "step": 1860
    },
    {
      "epoch": 5.548961424332344,
      "grad_norm": 0.1461247205734253,
      "learning_rate": 9.49557975890723e-05,
      "loss": 0.0138,
      "step": 1870
    },
    {
      "epoch": 5.578635014836795,
      "grad_norm": 0.12148546427488327,
      "learning_rate": 9.488317779179361e-05,
      "loss": 0.0105,
      "step": 1880
    },
    {
      "epoch": 5.6083086053412465,
      "grad_norm": 0.17779065668582916,
      "learning_rate": 9.481006715927351e-05,
      "loss": 0.0107,
      "step": 1890
    },
    {
      "epoch": 5.637982195845697,
      "grad_norm": 0.17717322707176208,
      "learning_rate": 9.473646649103818e-05,
      "loss": 0.0158,
      "step": 1900
    },
    {
      "epoch": 5.667655786350148,
      "grad_norm": 0.19820888340473175,
      "learning_rate": 9.46623765919727e-05,
      "loss": 0.0137,
      "step": 1910
    },
    {
      "epoch": 5.697329376854599,
      "grad_norm": 0.15721198916435242,
      "learning_rate": 9.458779827231237e-05,
      "loss": 0.0119,
      "step": 1920
    },
    {
      "epoch": 5.72700296735905,
      "grad_norm": 0.17158320546150208,
      "learning_rate": 9.451273234763371e-05,
      "loss": 0.0133,
      "step": 1930
    },
    {
      "epoch": 5.756676557863502,
      "grad_norm": 0.15745575726032257,
      "learning_rate": 9.443717963884569e-05,
      "loss": 0.0129,
      "step": 1940
    },
    {
      "epoch": 5.786350148367952,
      "grad_norm": 0.17795485258102417,
      "learning_rate": 9.43611409721806e-05,
      "loss": 0.013,
      "step": 1950
    },
    {
      "epoch": 5.816023738872404,
      "grad_norm": 0.18350425362586975,
      "learning_rate": 9.428461717918511e-05,
      "loss": 0.0103,
      "step": 1960
    },
    {
      "epoch": 5.845697329376854,
      "grad_norm": 0.1675357222557068,
      "learning_rate": 9.420760909671118e-05,
      "loss": 0.0149,
      "step": 1970
    },
    {
      "epoch": 5.8753709198813056,
      "grad_norm": 0.1395285427570343,
      "learning_rate": 9.413011756690685e-05,
      "loss": 0.0174,
      "step": 1980
    },
    {
      "epoch": 5.905044510385757,
      "grad_norm": 0.18694210052490234,
      "learning_rate": 9.405214343720707e-05,
      "loss": 0.011,
      "step": 1990
    },
    {
      "epoch": 5.9347181008902075,
      "grad_norm": 0.13509497046470642,
      "learning_rate": 9.397368756032445e-05,
      "loss": 0.0105,
      "step": 2000
    },
    {
      "epoch": 5.964391691394659,
      "grad_norm": 0.1707238107919693,
      "learning_rate": 9.389475079423988e-05,
      "loss": 0.0099,
      "step": 2010
    },
    {
      "epoch": 5.994065281899109,
      "grad_norm": 0.18797723948955536,
      "learning_rate": 9.381533400219318e-05,
      "loss": 0.0144,
      "step": 2020
    },
    {
      "epoch": 6.023738872403561,
      "grad_norm": 0.15263915061950684,
      "learning_rate": 9.373543805267368e-05,
      "loss": 0.0145,
      "step": 2030
    },
    {
      "epoch": 6.053412462908012,
      "grad_norm": 0.10786967724561691,
      "learning_rate": 9.365506381941066e-05,
      "loss": 0.0167,
      "step": 2040
    },
    {
      "epoch": 6.083086053412463,
      "grad_norm": 0.10059456527233124,
      "learning_rate": 9.357421218136386e-05,
      "loss": 0.0127,
      "step": 2050
    },
    {
      "epoch": 6.112759643916914,
      "grad_norm": 0.12447630614042282,
      "learning_rate": 9.349288402271388e-05,
      "loss": 0.0109,
      "step": 2060
    },
    {
      "epoch": 6.142433234421365,
      "grad_norm": 0.14649389684200287,
      "learning_rate": 9.341108023285238e-05,
      "loss": 0.0115,
      "step": 2070
    },
    {
      "epoch": 6.172106824925816,
      "grad_norm": 0.09072308987379074,
      "learning_rate": 9.332880170637252e-05,
      "loss": 0.0097,
      "step": 2080
    },
    {
      "epoch": 6.201780415430267,
      "grad_norm": 0.1700124889612198,
      "learning_rate": 9.32460493430591e-05,
      "loss": 0.0107,
      "step": 2090
    },
    {
      "epoch": 6.231454005934718,
      "grad_norm": 0.13447318971157074,
      "learning_rate": 9.316282404787871e-05,
      "loss": 0.0104,
      "step": 2100
    },
    {
      "epoch": 6.261127596439169,
      "grad_norm": 0.14388670027256012,
      "learning_rate": 9.30791267309698e-05,
      "loss": 0.0101,
      "step": 2110
    },
    {
      "epoch": 6.29080118694362,
      "grad_norm": 0.14514364302158356,
      "learning_rate": 9.299495830763286e-05,
      "loss": 0.0096,
      "step": 2120
    },
    {
      "epoch": 6.320474777448071,
      "grad_norm": 0.09969841688871384,
      "learning_rate": 9.291031969832026e-05,
      "loss": 0.0115,
      "step": 2130
    },
    {
      "epoch": 6.350148367952523,
      "grad_norm": 0.14247213304042816,
      "learning_rate": 9.282521182862629e-05,
      "loss": 0.0115,
      "step": 2140
    },
    {
      "epoch": 6.379821958456973,
      "grad_norm": 0.13603922724723816,
      "learning_rate": 9.273963562927695e-05,
      "loss": 0.0147,
      "step": 2150
    },
    {
      "epoch": 6.4094955489614245,
      "grad_norm": 0.23838986456394196,
      "learning_rate": 9.265359203611987e-05,
      "loss": 0.0115,
      "step": 2160
    },
    {
      "epoch": 6.439169139465875,
      "grad_norm": 0.11037889868021011,
      "learning_rate": 9.256708199011401e-05,
      "loss": 0.0096,
      "step": 2170
    },
    {
      "epoch": 6.468842729970326,
      "grad_norm": 0.15438543260097504,
      "learning_rate": 9.248010643731935e-05,
      "loss": 0.0126,
      "step": 2180
    },
    {
      "epoch": 6.498516320474778,
      "grad_norm": 0.12337090075016022,
      "learning_rate": 9.239266632888659e-05,
      "loss": 0.0093,
      "step": 2190
    },
    {
      "epoch": 6.528189910979228,
      "grad_norm": 0.09916039556264877,
      "learning_rate": 9.230476262104677e-05,
      "loss": 0.0114,
      "step": 2200
    },
    {
      "epoch": 6.55786350148368,
      "grad_norm": 0.1173950806260109,
      "learning_rate": 9.221639627510076e-05,
      "loss": 0.0095,
      "step": 2210
    },
    {
      "epoch": 6.58753709198813,
      "grad_norm": 0.14159461855888367,
      "learning_rate": 9.212756825740873e-05,
      "loss": 0.0096,
      "step": 2220
    },
    {
      "epoch": 6.617210682492582,
      "grad_norm": 0.14629167318344116,
      "learning_rate": 9.20382795393797e-05,
      "loss": 0.0118,
      "step": 2230
    },
    {
      "epoch": 6.646884272997033,
      "grad_norm": 0.11691708117723465,
      "learning_rate": 9.194853109746074e-05,
      "loss": 0.0095,
      "step": 2240
    },
    {
      "epoch": 6.6765578635014835,
      "grad_norm": 0.12816114723682404,
      "learning_rate": 9.185832391312644e-05,
      "loss": 0.0133,
      "step": 2250
    },
    {
      "epoch": 6.706231454005935,
      "grad_norm": 0.11063099652528763,
      "learning_rate": 9.176765897286813e-05,
      "loss": 0.0125,
      "step": 2260
    },
    {
      "epoch": 6.735905044510385,
      "grad_norm": 0.12592542171478271,
      "learning_rate": 9.167653726818305e-05,
      "loss": 0.0097,
      "step": 2270
    },
    {
      "epoch": 6.765578635014837,
      "grad_norm": 0.20816679298877716,
      "learning_rate": 9.158495979556358e-05,
      "loss": 0.0127,
      "step": 2280
    },
    {
      "epoch": 6.795252225519288,
      "grad_norm": 0.13589587807655334,
      "learning_rate": 9.14929275564863e-05,
      "loss": 0.0112,
      "step": 2290
    },
    {
      "epoch": 6.824925816023739,
      "grad_norm": 0.17974646389484406,
      "learning_rate": 9.140044155740101e-05,
      "loss": 0.0088,
      "step": 2300
    },
    {
      "epoch": 6.85459940652819,
      "grad_norm": 0.18915049731731415,
      "learning_rate": 9.130750280971978e-05,
      "loss": 0.0156,
      "step": 2310
    },
    {
      "epoch": 6.884272997032641,
      "grad_norm": 0.14018063247203827,
      "learning_rate": 9.121411232980588e-05,
      "loss": 0.0098,
      "step": 2320
    },
    {
      "epoch": 6.913946587537092,
      "grad_norm": 0.13840338587760925,
      "learning_rate": 9.112027113896262e-05,
      "loss": 0.017,
      "step": 2330
    },
    {
      "epoch": 6.943620178041543,
      "grad_norm": 0.11696403473615646,
      "learning_rate": 9.102598026342222e-05,
      "loss": 0.0099,
      "step": 2340
    },
    {
      "epoch": 6.973293768545994,
      "grad_norm": 0.13574601709842682,
      "learning_rate": 9.093124073433463e-05,
      "loss": 0.014,
      "step": 2350
    },
    {
      "epoch": 7.002967359050445,
      "grad_norm": 0.10100409388542175,
      "learning_rate": 9.083605358775612e-05,
      "loss": 0.0103,
      "step": 2360
    },
    {
      "epoch": 7.032640949554896,
      "grad_norm": 0.16500301659107208,
      "learning_rate": 9.074041986463808e-05,
      "loss": 0.0109,
      "step": 2370
    },
    {
      "epoch": 7.062314540059347,
      "grad_norm": 0.18439586460590363,
      "learning_rate": 9.064434061081562e-05,
      "loss": 0.0095,
      "step": 2380
    },
    {
      "epoch": 7.091988130563799,
      "grad_norm": 0.12944291532039642,
      "learning_rate": 9.0547816876996e-05,
      "loss": 0.0123,
      "step": 2390
    },
    {
      "epoch": 7.121661721068249,
      "grad_norm": 0.1536119282245636,
      "learning_rate": 9.045084971874738e-05,
      "loss": 0.0125,
      "step": 2400
    },
    {
      "epoch": 7.1513353115727005,
      "grad_norm": 0.1650673747062683,
      "learning_rate": 9.035344019648702e-05,
      "loss": 0.0092,
      "step": 2410
    },
    {
      "epoch": 7.181008902077151,
      "grad_norm": 0.12277387827634811,
      "learning_rate": 9.025558937546988e-05,
      "loss": 0.0102,
      "step": 2420
    },
    {
      "epoch": 7.210682492581602,
      "grad_norm": 0.15055687725543976,
      "learning_rate": 9.015729832577681e-05,
      "loss": 0.0094,
      "step": 2430
    },
    {
      "epoch": 7.240356083086054,
      "grad_norm": 0.16825971007347107,
      "learning_rate": 9.005856812230304e-05,
      "loss": 0.0116,
      "step": 2440
    },
    {
      "epoch": 7.270029673590504,
      "grad_norm": 0.10691312700510025,
      "learning_rate": 8.995939984474624e-05,
      "loss": 0.0095,
      "step": 2450
    },
    {
      "epoch": 7.299703264094956,
      "grad_norm": 0.14602239429950714,
      "learning_rate": 8.98597945775948e-05,
      "loss": 0.0124,
      "step": 2460
    },
    {
      "epoch": 7.329376854599406,
      "grad_norm": 0.13875631988048553,
      "learning_rate": 8.975975341011596e-05,
      "loss": 0.0106,
      "step": 2470
    },
    {
      "epoch": 7.359050445103858,
      "grad_norm": 0.12208565324544907,
      "learning_rate": 8.965927743634391e-05,
      "loss": 0.0108,
      "step": 2480
    },
    {
      "epoch": 7.388724035608309,
      "grad_norm": 0.11230789124965668,
      "learning_rate": 8.955836775506776e-05,
      "loss": 0.0081,
      "step": 2490
    },
    {
      "epoch": 7.4183976261127595,
      "grad_norm": 0.13064904510974884,
      "learning_rate": 8.945702546981969e-05,
      "loss": 0.0122,
      "step": 2500
    },
    {
      "epoch": 7.448071216617211,
      "grad_norm": 0.16824467480182648,
      "learning_rate": 8.935525168886262e-05,
      "loss": 0.0112,
      "step": 2510
    },
    {
      "epoch": 7.4777448071216615,
      "grad_norm": 0.11342830210924149,
      "learning_rate": 8.92530475251784e-05,
      "loss": 0.0103,
      "step": 2520
    },
    {
      "epoch": 7.507418397626113,
      "grad_norm": 0.15296466648578644,
      "learning_rate": 8.91504140964553e-05,
      "loss": 0.0085,
      "step": 2530
    },
    {
      "epoch": 7.537091988130564,
      "grad_norm": 0.16064001619815826,
      "learning_rate": 8.90473525250761e-05,
      "loss": 0.0114,
      "step": 2540
    },
    {
      "epoch": 7.566765578635015,
      "grad_norm": 0.10076630860567093,
      "learning_rate": 8.894386393810563e-05,
      "loss": 0.0144,
      "step": 2550
    },
    {
      "epoch": 7.596439169139466,
      "grad_norm": 0.15510450303554535,
      "learning_rate": 8.883994946727849e-05,
      "loss": 0.0112,
      "step": 2560
    },
    {
      "epoch": 7.626112759643917,
      "grad_norm": 0.21251456439495087,
      "learning_rate": 8.873561024898668e-05,
      "loss": 0.0106,
      "step": 2570
    },
    {
      "epoch": 7.655786350148368,
      "grad_norm": 0.17526623606681824,
      "learning_rate": 8.863084742426719e-05,
      "loss": 0.0113,
      "step": 2580
    },
    {
      "epoch": 7.6854599406528195,
      "grad_norm": 0.12284035235643387,
      "learning_rate": 8.852566213878947e-05,
      "loss": 0.0114,
      "step": 2590
    },
    {
      "epoch": 7.71513353115727,
      "grad_norm": 0.12916874885559082,
      "learning_rate": 8.842005554284296e-05,
      "loss": 0.0099,
      "step": 2600
    },
    {
      "epoch": 7.744807121661721,
      "grad_norm": 0.17493458092212677,
      "learning_rate": 8.831402879132446e-05,
      "loss": 0.0092,
      "step": 2610
    },
    {
      "epoch": 7.774480712166172,
      "grad_norm": 0.12995202839374542,
      "learning_rate": 8.820758304372557e-05,
      "loss": 0.0104,
      "step": 2620
    },
    {
      "epoch": 7.804154302670623,
      "grad_norm": 0.08063960820436478,
      "learning_rate": 8.810071946411989e-05,
      "loss": 0.0087,
      "step": 2630
    },
    {
      "epoch": 7.833827893175075,
      "grad_norm": 0.10341209173202515,
      "learning_rate": 8.799343922115044e-05,
      "loss": 0.0077,
      "step": 2640
    },
    {
      "epoch": 7.863501483679525,
      "grad_norm": 0.108217254281044,
      "learning_rate": 8.788574348801675e-05,
      "loss": 0.0117,
      "step": 2650
    },
    {
      "epoch": 7.893175074183977,
      "grad_norm": 0.1359342336654663,
      "learning_rate": 8.77776334424621e-05,
      "loss": 0.0157,
      "step": 2660
    },
    {
      "epoch": 7.922848664688427,
      "grad_norm": 0.13467204570770264,
      "learning_rate": 8.766911026676064e-05,
      "loss": 0.011,
      "step": 2670
    },
    {
      "epoch": 7.9525222551928785,
      "grad_norm": 0.1321392059326172,
      "learning_rate": 8.756017514770443e-05,
      "loss": 0.0136,
      "step": 2680
    },
    {
      "epoch": 7.98219584569733,
      "grad_norm": 0.16591744124889374,
      "learning_rate": 8.745082927659047e-05,
      "loss": 0.0093,
      "step": 2690
    },
    {
      "epoch": 8.011869436201781,
      "grad_norm": 0.14482906460762024,
      "learning_rate": 8.73410738492077e-05,
      "loss": 0.012,
      "step": 2700
    },
    {
      "epoch": 8.041543026706231,
      "grad_norm": 0.12772271037101746,
      "learning_rate": 8.723091006582389e-05,
      "loss": 0.0118,
      "step": 2710
    },
    {
      "epoch": 8.071216617210682,
      "grad_norm": 0.12283479422330856,
      "learning_rate": 8.71203391311725e-05,
      "loss": 0.0104,
      "step": 2720
    },
    {
      "epoch": 8.100890207715134,
      "grad_norm": 0.11549960821866989,
      "learning_rate": 8.700936225443959e-05,
      "loss": 0.0101,
      "step": 2730
    },
    {
      "epoch": 8.130563798219585,
      "grad_norm": 0.12340424209833145,
      "learning_rate": 8.689798064925049e-05,
      "loss": 0.0108,
      "step": 2740
    },
    {
      "epoch": 8.160237388724036,
      "grad_norm": 0.13828441500663757,
      "learning_rate": 8.678619553365659e-05,
      "loss": 0.0084,
      "step": 2750
    },
    {
      "epoch": 8.189910979228486,
      "grad_norm": 0.176404669880867,
      "learning_rate": 8.6674008130122e-05,
      "loss": 0.0093,
      "step": 2760
    },
    {
      "epoch": 8.219584569732937,
      "grad_norm": 0.12829335033893585,
      "learning_rate": 8.656141966551019e-05,
      "loss": 0.0095,
      "step": 2770
    },
    {
      "epoch": 8.249258160237389,
      "grad_norm": 0.15259785950183868,
      "learning_rate": 8.644843137107059e-05,
      "loss": 0.01,
      "step": 2780
    },
    {
      "epoch": 8.27893175074184,
      "grad_norm": 0.17509141564369202,
      "learning_rate": 8.633504448242505e-05,
      "loss": 0.0086,
      "step": 2790
    },
    {
      "epoch": 8.308605341246292,
      "grad_norm": 0.18248887360095978,
      "learning_rate": 8.622126023955446e-05,
      "loss": 0.0098,
      "step": 2800
    },
    {
      "epoch": 8.338278931750741,
      "grad_norm": 0.13852570950984955,
      "learning_rate": 8.610707988678503e-05,
      "loss": 0.0109,
      "step": 2810
    },
    {
      "epoch": 8.367952522255193,
      "grad_norm": 0.15752212703227997,
      "learning_rate": 8.599250467277483e-05,
      "loss": 0.0088,
      "step": 2820
    },
    {
      "epoch": 8.397626112759644,
      "grad_norm": 0.08893997222185135,
      "learning_rate": 8.587753585050004e-05,
      "loss": 0.0096,
      "step": 2830
    },
    {
      "epoch": 8.427299703264095,
      "grad_norm": 0.1134849488735199,
      "learning_rate": 8.576217467724128e-05,
      "loss": 0.0105,
      "step": 2840
    },
    {
      "epoch": 8.456973293768545,
      "grad_norm": 0.18662723898887634,
      "learning_rate": 8.564642241456986e-05,
      "loss": 0.0102,
      "step": 2850
    },
    {
      "epoch": 8.486646884272997,
      "grad_norm": 0.07733399420976639,
      "learning_rate": 8.553028032833397e-05,
      "loss": 0.0104,
      "step": 2860
    },
    {
      "epoch": 8.516320474777448,
      "grad_norm": 0.13568611443042755,
      "learning_rate": 8.541374968864487e-05,
      "loss": 0.0086,
      "step": 2870
    },
    {
      "epoch": 8.5459940652819,
      "grad_norm": 0.07520133256912231,
      "learning_rate": 8.529683176986295e-05,
      "loss": 0.0084,
      "step": 2880
    },
    {
      "epoch": 8.57566765578635,
      "grad_norm": 0.06504914909601212,
      "learning_rate": 8.517952785058385e-05,
      "loss": 0.0086,
      "step": 2890
    },
    {
      "epoch": 8.605341246290802,
      "grad_norm": 0.15347328782081604,
      "learning_rate": 8.506183921362443e-05,
      "loss": 0.0097,
      "step": 2900
    },
    {
      "epoch": 8.635014836795252,
      "grad_norm": 0.15778006613254547,
      "learning_rate": 8.494376714600878e-05,
      "loss": 0.0157,
      "step": 2910
    },
    {
      "epoch": 8.664688427299703,
      "grad_norm": 0.15846551954746246,
      "learning_rate": 8.482531293895412e-05,
      "loss": 0.0093,
      "step": 2920
    },
    {
      "epoch": 8.694362017804155,
      "grad_norm": 0.19215027987957,
      "learning_rate": 8.470647788785665e-05,
      "loss": 0.0101,
      "step": 2930
    },
    {
      "epoch": 8.724035608308606,
      "grad_norm": 0.14507389068603516,
      "learning_rate": 8.458726329227747e-05,
      "loss": 0.0102,
      "step": 2940
    },
    {
      "epoch": 8.753709198813056,
      "grad_norm": 0.15245003998279572,
      "learning_rate": 8.44676704559283e-05,
      "loss": 0.009,
      "step": 2950
    },
    {
      "epoch": 8.783382789317507,
      "grad_norm": 0.16085323691368103,
      "learning_rate": 8.434770068665723e-05,
      "loss": 0.0122,
      "step": 2960
    },
    {
      "epoch": 8.813056379821958,
      "grad_norm": 0.12772567570209503,
      "learning_rate": 8.422735529643444e-05,
      "loss": 0.012,
      "step": 2970
    },
    {
      "epoch": 8.84272997032641,
      "grad_norm": 0.12985776364803314,
      "learning_rate": 8.410663560133784e-05,
      "loss": 0.0085,
      "step": 2980
    },
    {
      "epoch": 8.872403560830861,
      "grad_norm": 0.13308796286582947,
      "learning_rate": 8.398554292153866e-05,
      "loss": 0.0076,
      "step": 2990
    },
    {
      "epoch": 8.90207715133531,
      "grad_norm": 0.18778546154499054,
      "learning_rate": 8.386407858128706e-05,
      "loss": 0.0116,
      "step": 3000
    },
    {
      "epoch": 8.931750741839762,
      "grad_norm": 0.16211922466754913,
      "learning_rate": 8.37422439088976e-05,
      "loss": 0.0093,
      "step": 3010
    },
    {
      "epoch": 8.961424332344214,
      "grad_norm": 0.1257450431585312,
      "learning_rate": 8.362004023673474e-05,
      "loss": 0.0121,
      "step": 3020
    },
    {
      "epoch": 8.991097922848665,
      "grad_norm": 0.21673846244812012,
      "learning_rate": 8.349746890119826e-05,
      "loss": 0.0087,
      "step": 3030
    },
    {
      "epoch": 9.020771513353116,
      "grad_norm": 0.10423213243484497,
      "learning_rate": 8.337453124270863e-05,
      "loss": 0.0074,
      "step": 3040
    },
    {
      "epoch": 9.050445103857566,
      "grad_norm": 0.12016414105892181,
      "learning_rate": 8.32512286056924e-05,
      "loss": 0.0106,
      "step": 3050
    },
    {
      "epoch": 9.080118694362017,
      "grad_norm": 0.13180746138095856,
      "learning_rate": 8.31275623385675e-05,
      "loss": 0.0108,
      "step": 3060
    },
    {
      "epoch": 9.109792284866469,
      "grad_norm": 0.09843757003545761,
      "learning_rate": 8.300353379372834e-05,
      "loss": 0.0119,
      "step": 3070
    },
    {
      "epoch": 9.13946587537092,
      "grad_norm": 0.08532913774251938,
      "learning_rate": 8.287914432753123e-05,
      "loss": 0.009,
      "step": 3080
    },
    {
      "epoch": 9.169139465875372,
      "grad_norm": 0.08058229833841324,
      "learning_rate": 8.275439530027948e-05,
      "loss": 0.0096,
      "step": 3090
    },
    {
      "epoch": 9.198813056379821,
      "grad_norm": 0.14377473294734955,
      "learning_rate": 8.262928807620843e-05,
      "loss": 0.0101,
      "step": 3100
    },
    {
      "epoch": 9.228486646884273,
      "grad_norm": 0.12718182802200317,
      "learning_rate": 8.250382402347065e-05,
      "loss": 0.0124,
      "step": 3110
    },
    {
      "epoch": 9.258160237388724,
      "grad_norm": 0.1333075761795044,
      "learning_rate": 8.237800451412095e-05,
      "loss": 0.0091,
      "step": 3120
    },
    {
      "epoch": 9.287833827893175,
      "grad_norm": 0.12244340032339096,
      "learning_rate": 8.225183092410128e-05,
      "loss": 0.0108,
      "step": 3130
    },
    {
      "epoch": 9.317507418397627,
      "grad_norm": 0.14877669513225555,
      "learning_rate": 8.212530463322583e-05,
      "loss": 0.0106,
      "step": 3140
    },
    {
      "epoch": 9.347181008902076,
      "grad_norm": 0.15959547460079193,
      "learning_rate": 8.199842702516583e-05,
      "loss": 0.0098,
      "step": 3150
    },
    {
      "epoch": 9.376854599406528,
      "grad_norm": 0.16950540244579315,
      "learning_rate": 8.18711994874345e-05,
      "loss": 0.0083,
      "step": 3160
    },
    {
      "epoch": 9.40652818991098,
      "grad_norm": 0.12437810003757477,
      "learning_rate": 8.174362341137177e-05,
      "loss": 0.0101,
      "step": 3170
    },
    {
      "epoch": 9.43620178041543,
      "grad_norm": 0.13770434260368347,
      "learning_rate": 8.161570019212921e-05,
      "loss": 0.0088,
      "step": 3180
    },
    {
      "epoch": 9.465875370919882,
      "grad_norm": 0.17643623054027557,
      "learning_rate": 8.148743122865463e-05,
      "loss": 0.0102,
      "step": 3190
    },
    {
      "epoch": 9.495548961424332,
      "grad_norm": 0.14998288452625275,
      "learning_rate": 8.135881792367686e-05,
      "loss": 0.0119,
      "step": 3200
    },
    {
      "epoch": 9.525222551928783,
      "grad_norm": 0.14738821983337402,
      "learning_rate": 8.12298616836904e-05,
      "loss": 0.0098,
      "step": 3210
    },
    {
      "epoch": 9.554896142433234,
      "grad_norm": 0.10429758578538895,
      "learning_rate": 8.110056391894005e-05,
      "loss": 0.0079,
      "step": 3220
    },
    {
      "epoch": 9.584569732937686,
      "grad_norm": 0.12666217982769012,
      "learning_rate": 8.097092604340542e-05,
      "loss": 0.009,
      "step": 3230
    },
    {
      "epoch": 9.614243323442137,
      "grad_norm": 0.12265059351921082,
      "learning_rate": 8.084094947478556e-05,
      "loss": 0.0076,
      "step": 3240
    },
    {
      "epoch": 9.643916913946587,
      "grad_norm": 0.11325700581073761,
      "learning_rate": 8.07106356344834e-05,
      "loss": 0.0071,
      "step": 3250
    },
    {
      "epoch": 9.673590504451038,
      "grad_norm": 0.13584177196025848,
      "learning_rate": 8.057998594759022e-05,
      "loss": 0.0135,
      "step": 3260
    },
    {
      "epoch": 9.70326409495549,
      "grad_norm": 0.19772425293922424,
      "learning_rate": 8.044900184287007e-05,
      "loss": 0.0093,
      "step": 3270
    },
    {
      "epoch": 9.732937685459941,
      "grad_norm": 0.11252380162477493,
      "learning_rate": 8.031768475274413e-05,
      "loss": 0.0081,
      "step": 3280
    },
    {
      "epoch": 9.762611275964392,
      "grad_norm": 0.1141551062464714,
      "learning_rate": 8.018603611327504e-05,
      "loss": 0.0098,
      "step": 3290
    },
    {
      "epoch": 9.792284866468842,
      "grad_norm": 0.17557533085346222,
      "learning_rate": 8.005405736415126e-05,
      "loss": 0.0084,
      "step": 3300
    },
    {
      "epoch": 9.821958456973293,
      "grad_norm": 0.1568543016910553,
      "learning_rate": 7.992174994867123e-05,
      "loss": 0.0085,
      "step": 3310
    },
    {
      "epoch": 9.851632047477745,
      "grad_norm": 0.13577055931091309,
      "learning_rate": 7.978911531372765e-05,
      "loss": 0.0075,
      "step": 3320
    },
    {
      "epoch": 9.881305637982196,
      "grad_norm": 0.16812065243721008,
      "learning_rate": 7.965615490979163e-05,
      "loss": 0.0093,
      "step": 3330
    },
    {
      "epoch": 9.910979228486648,
      "grad_norm": 0.1458335816860199,
      "learning_rate": 7.952287019089685e-05,
      "loss": 0.0082,
      "step": 3340
    },
    {
      "epoch": 9.940652818991097,
      "grad_norm": 0.20839525759220123,
      "learning_rate": 7.938926261462366e-05,
      "loss": 0.0114,
      "step": 3350
    },
    {
      "epoch": 9.970326409495549,
      "grad_norm": 0.1371382474899292,
      "learning_rate": 7.925533364208309e-05,
      "loss": 0.0078,
      "step": 3360
    },
    {
      "epoch": 10.0,
      "grad_norm": 0.1349106878042221,
      "learning_rate": 7.912108473790092e-05,
      "loss": 0.014,
      "step": 3370
    },
    {
      "epoch": 10.029673590504451,
      "grad_norm": 0.12665338814258575,
      "learning_rate": 7.898651737020166e-05,
      "loss": 0.0104,
      "step": 3380
    },
    {
      "epoch": 10.059347181008903,
      "grad_norm": 0.13897213339805603,
      "learning_rate": 7.88516330105925e-05,
      "loss": 0.011,
      "step": 3390
    },
    {
      "epoch": 10.089020771513352,
      "grad_norm": 0.16300174593925476,
      "learning_rate": 7.871643313414718e-05,
      "loss": 0.0126,
      "step": 3400
    },
    {
      "epoch": 10.118694362017804,
      "grad_norm": 0.1468997597694397,
      "learning_rate": 7.858091921938988e-05,
      "loss": 0.0101,
      "step": 3410
    },
    {
      "epoch": 10.148367952522255,
      "grad_norm": 0.09698769450187683,
      "learning_rate": 7.844509274827907e-05,
| "loss": 0.0103, | |
| "step": 3420 | |
| }, | |
| { | |
| "epoch": 10.178041543026707, | |
| "grad_norm": 0.09831503033638, | |
| "learning_rate": 7.830895520619128e-05, | |
| "loss": 0.0084, | |
| "step": 3430 | |
| }, | |
| { | |
| "epoch": 10.207715133531158, | |
| "grad_norm": 0.17069242894649506, | |
| "learning_rate": 7.817250808190483e-05, | |
| "loss": 0.0116, | |
| "step": 3440 | |
| }, | |
| { | |
| "epoch": 10.237388724035608, | |
| "grad_norm": 0.1412976086139679, | |
| "learning_rate": 7.803575286758364e-05, | |
| "loss": 0.0074, | |
| "step": 3450 | |
| }, | |
| { | |
| "epoch": 10.267062314540059, | |
| "grad_norm": 0.13389386236667633, | |
| "learning_rate": 7.789869105876083e-05, | |
| "loss": 0.0118, | |
| "step": 3460 | |
| }, | |
| { | |
| "epoch": 10.29673590504451, | |
| "grad_norm": 0.09064219892024994, | |
| "learning_rate": 7.776132415432234e-05, | |
| "loss": 0.0081, | |
| "step": 3470 | |
| }, | |
| { | |
| "epoch": 10.326409495548962, | |
| "grad_norm": 0.09377647191286087, | |
| "learning_rate": 7.762365365649067e-05, | |
| "loss": 0.01, | |
| "step": 3480 | |
| }, | |
| { | |
| "epoch": 10.356083086053413, | |
| "grad_norm": 0.11738749593496323, | |
| "learning_rate": 7.748568107080832e-05, | |
| "loss": 0.009, | |
| "step": 3490 | |
| }, | |
| { | |
| "epoch": 10.385756676557863, | |
| "grad_norm": 0.1620689481496811, | |
| "learning_rate": 7.734740790612136e-05, | |
| "loss": 0.0141, | |
| "step": 3500 | |
| }, | |
| { | |
| "epoch": 10.415430267062314, | |
| "grad_norm": 0.11674781888723373, | |
| "learning_rate": 7.720883567456298e-05, | |
| "loss": 0.0097, | |
| "step": 3510 | |
| }, | |
| { | |
| "epoch": 10.445103857566766, | |
| "grad_norm": 0.10295745730400085, | |
| "learning_rate": 7.70699658915369e-05, | |
| "loss": 0.0091, | |
| "step": 3520 | |
| }, | |
| { | |
| "epoch": 10.474777448071217, | |
| "grad_norm": 0.09909237176179886, | |
| "learning_rate": 7.693080007570084e-05, | |
| "loss": 0.0093, | |
| "step": 3530 | |
| }, | |
| { | |
| "epoch": 10.504451038575668, | |
| "grad_norm": 0.12657557427883148, | |
| "learning_rate": 7.679133974894983e-05, | |
| "loss": 0.0118, | |
| "step": 3540 | |
| }, | |
| { | |
| "epoch": 10.534124629080118, | |
| "grad_norm": 0.08864331245422363, | |
| "learning_rate": 7.66515864363997e-05, | |
| "loss": 0.0091, | |
| "step": 3550 | |
| }, | |
| { | |
| "epoch": 10.56379821958457, | |
| "grad_norm": 0.1489853858947754, | |
| "learning_rate": 7.651154166637025e-05, | |
| "loss": 0.0092, | |
| "step": 3560 | |
| }, | |
| { | |
| "epoch": 10.59347181008902, | |
| "grad_norm": 0.16031557321548462, | |
| "learning_rate": 7.637120697036866e-05, | |
| "loss": 0.0107, | |
| "step": 3570 | |
| }, | |
| { | |
| "epoch": 10.623145400593472, | |
| "grad_norm": 0.09444715827703476, | |
| "learning_rate": 7.623058388307269e-05, | |
| "loss": 0.0114, | |
| "step": 3580 | |
| }, | |
| { | |
| "epoch": 10.652818991097924, | |
| "grad_norm": 0.09349317848682404, | |
| "learning_rate": 7.608967394231387e-05, | |
| "loss": 0.0084, | |
| "step": 3590 | |
| }, | |
| { | |
| "epoch": 10.682492581602373, | |
| "grad_norm": 0.1241699606180191, | |
| "learning_rate": 7.594847868906076e-05, | |
| "loss": 0.011, | |
| "step": 3600 | |
| }, | |
| { | |
| "epoch": 10.712166172106825, | |
| "grad_norm": 0.12074402719736099, | |
| "learning_rate": 7.580699966740201e-05, | |
| "loss": 0.0076, | |
| "step": 3610 | |
| }, | |
| { | |
| "epoch": 10.741839762611276, | |
| "grad_norm": 0.1192033663392067, | |
| "learning_rate": 7.566523842452958e-05, | |
| "loss": 0.0084, | |
| "step": 3620 | |
| }, | |
| { | |
| "epoch": 10.771513353115727, | |
| "grad_norm": 0.09560723602771759, | |
| "learning_rate": 7.552319651072164e-05, | |
| "loss": 0.0089, | |
| "step": 3630 | |
| }, | |
| { | |
| "epoch": 10.801186943620179, | |
| "grad_norm": 0.09700684994459152, | |
| "learning_rate": 7.538087547932585e-05, | |
| "loss": 0.0099, | |
| "step": 3640 | |
| }, | |
| { | |
| "epoch": 10.830860534124628, | |
| "grad_norm": 0.14513616263866425, | |
| "learning_rate": 7.52382768867422e-05, | |
| "loss": 0.0106, | |
| "step": 3650 | |
| }, | |
| { | |
| "epoch": 10.86053412462908, | |
| "grad_norm": 0.07739041745662689, | |
| "learning_rate": 7.509540229240601e-05, | |
| "loss": 0.0066, | |
| "step": 3660 | |
| }, | |
| { | |
| "epoch": 10.890207715133531, | |
| "grad_norm": 0.09094832837581635, | |
| "learning_rate": 7.495225325877103e-05, | |
| "loss": 0.0086, | |
| "step": 3670 | |
| }, | |
| { | |
| "epoch": 10.919881305637983, | |
| "grad_norm": 0.1041349396109581, | |
| "learning_rate": 7.480883135129211e-05, | |
| "loss": 0.0099, | |
| "step": 3680 | |
| }, | |
| { | |
| "epoch": 10.949554896142434, | |
| "grad_norm": 0.13735149800777435, | |
| "learning_rate": 7.466513813840825e-05, | |
| "loss": 0.0108, | |
| "step": 3690 | |
| }, | |
| { | |
| "epoch": 10.979228486646884, | |
| "grad_norm": 0.1874203383922577, | |
| "learning_rate": 7.452117519152542e-05, | |
| "loss": 0.0082, | |
| "step": 3700 | |
| }, | |
| { | |
| "epoch": 11.008902077151335, | |
| "grad_norm": 0.1152455285191536, | |
| "learning_rate": 7.437694408499933e-05, | |
| "loss": 0.0117, | |
| "step": 3710 | |
| }, | |
| { | |
| "epoch": 11.038575667655786, | |
| "grad_norm": 0.1247527226805687, | |
| "learning_rate": 7.423244639611826e-05, | |
| "loss": 0.0098, | |
| "step": 3720 | |
| }, | |
| { | |
| "epoch": 11.068249258160238, | |
| "grad_norm": 0.11642714589834213, | |
| "learning_rate": 7.408768370508576e-05, | |
| "loss": 0.0073, | |
| "step": 3730 | |
| }, | |
| { | |
| "epoch": 11.09792284866469, | |
| "grad_norm": 0.11188193410634995, | |
| "learning_rate": 7.394265759500348e-05, | |
| "loss": 0.0125, | |
| "step": 3740 | |
| }, | |
| { | |
| "epoch": 11.127596439169139, | |
| "grad_norm": 0.08563455194234848, | |
| "learning_rate": 7.379736965185368e-05, | |
| "loss": 0.0092, | |
| "step": 3750 | |
| }, | |
| { | |
| "epoch": 11.15727002967359, | |
| "grad_norm": 0.09015386551618576, | |
| "learning_rate": 7.365182146448205e-05, | |
| "loss": 0.009, | |
| "step": 3760 | |
| }, | |
| { | |
| "epoch": 11.186943620178042, | |
| "grad_norm": 0.13220249116420746, | |
| "learning_rate": 7.350601462458024e-05, | |
| "loss": 0.0116, | |
| "step": 3770 | |
| }, | |
| { | |
| "epoch": 11.216617210682493, | |
| "grad_norm": 0.09610439091920853, | |
| "learning_rate": 7.335995072666848e-05, | |
| "loss": 0.0089, | |
| "step": 3780 | |
| }, | |
| { | |
| "epoch": 11.246290801186944, | |
| "grad_norm": 0.1600012481212616, | |
| "learning_rate": 7.32136313680782e-05, | |
| "loss": 0.0075, | |
| "step": 3790 | |
| }, | |
| { | |
| "epoch": 11.275964391691394, | |
| "grad_norm": 0.10654013603925705, | |
| "learning_rate": 7.30670581489344e-05, | |
| "loss": 0.0101, | |
| "step": 3800 | |
| }, | |
| { | |
| "epoch": 11.305637982195845, | |
| "grad_norm": 0.10944235324859619, | |
| "learning_rate": 7.292023267213835e-05, | |
| "loss": 0.0069, | |
| "step": 3810 | |
| }, | |
| { | |
| "epoch": 11.335311572700297, | |
| "grad_norm": 0.13431453704833984, | |
| "learning_rate": 7.277315654334997e-05, | |
| "loss": 0.0115, | |
| "step": 3820 | |
| }, | |
| { | |
| "epoch": 11.364985163204748, | |
| "grad_norm": 0.09411869198083878, | |
| "learning_rate": 7.262583137097018e-05, | |
| "loss": 0.0114, | |
| "step": 3830 | |
| }, | |
| { | |
| "epoch": 11.3946587537092, | |
| "grad_norm": 0.10903919488191605, | |
| "learning_rate": 7.247825876612353e-05, | |
| "loss": 0.008, | |
| "step": 3840 | |
| }, | |
| { | |
| "epoch": 11.42433234421365, | |
| "grad_norm": 0.1587817668914795, | |
| "learning_rate": 7.233044034264034e-05, | |
| "loss": 0.0082, | |
| "step": 3850 | |
| }, | |
| { | |
| "epoch": 11.4540059347181, | |
| "grad_norm": 0.18822628259658813, | |
| "learning_rate": 7.218237771703921e-05, | |
| "loss": 0.0115, | |
| "step": 3860 | |
| }, | |
| { | |
| "epoch": 11.483679525222552, | |
| "grad_norm": 0.24383868277072906, | |
| "learning_rate": 7.203407250850928e-05, | |
| "loss": 0.0103, | |
| "step": 3870 | |
| }, | |
| { | |
| "epoch": 11.513353115727003, | |
| "grad_norm": 0.1706736832857132, | |
| "learning_rate": 7.188552633889259e-05, | |
| "loss": 0.009, | |
| "step": 3880 | |
| }, | |
| { | |
| "epoch": 11.543026706231455, | |
| "grad_norm": 0.13557754456996918, | |
| "learning_rate": 7.173674083266624e-05, | |
| "loss": 0.0085, | |
| "step": 3890 | |
| }, | |
| { | |
| "epoch": 11.572700296735905, | |
| "grad_norm": 0.20359116792678833, | |
| "learning_rate": 7.158771761692464e-05, | |
| "loss": 0.0104, | |
| "step": 3900 | |
| }, | |
| { | |
| "epoch": 11.602373887240356, | |
| "grad_norm": 0.11189489811658859, | |
| "learning_rate": 7.143845832136188e-05, | |
| "loss": 0.0091, | |
| "step": 3910 | |
| }, | |
| { | |
| "epoch": 11.632047477744807, | |
| "grad_norm": 0.17423279583454132, | |
| "learning_rate": 7.128896457825364e-05, | |
| "loss": 0.0087, | |
| "step": 3920 | |
| }, | |
| { | |
| "epoch": 11.661721068249259, | |
| "grad_norm": 0.13194143772125244, | |
| "learning_rate": 7.113923802243957e-05, | |
| "loss": 0.0093, | |
| "step": 3930 | |
| }, | |
| { | |
| "epoch": 11.691394658753708, | |
| "grad_norm": 0.15409836173057556, | |
| "learning_rate": 7.09892802913053e-05, | |
| "loss": 0.0102, | |
| "step": 3940 | |
| }, | |
| { | |
| "epoch": 11.72106824925816, | |
| "grad_norm": 0.10540139675140381, | |
| "learning_rate": 7.083909302476453e-05, | |
| "loss": 0.0107, | |
| "step": 3950 | |
| }, | |
| { | |
| "epoch": 11.750741839762611, | |
| "grad_norm": 0.15967734158039093, | |
| "learning_rate": 7.068867786524116e-05, | |
| "loss": 0.0082, | |
| "step": 3960 | |
| }, | |
| { | |
| "epoch": 11.780415430267063, | |
| "grad_norm": 0.10224897414445877, | |
| "learning_rate": 7.053803645765128e-05, | |
| "loss": 0.0097, | |
| "step": 3970 | |
| }, | |
| { | |
| "epoch": 11.810089020771514, | |
| "grad_norm": 0.12072591483592987, | |
| "learning_rate": 7.038717044938519e-05, | |
| "loss": 0.008, | |
| "step": 3980 | |
| }, | |
| { | |
| "epoch": 11.839762611275965, | |
| "grad_norm": 0.09004125744104385, | |
| "learning_rate": 7.023608149028937e-05, | |
| "loss": 0.0084, | |
| "step": 3990 | |
| }, | |
| { | |
| "epoch": 11.869436201780415, | |
| "grad_norm": 0.09716668725013733, | |
| "learning_rate": 7.008477123264848e-05, | |
| "loss": 0.0097, | |
| "step": 4000 | |
| }, | |
| { | |
| "epoch": 11.899109792284866, | |
| "grad_norm": 0.11283983290195465, | |
| "learning_rate": 6.993324133116726e-05, | |
| "loss": 0.0088, | |
| "step": 4010 | |
| }, | |
| { | |
| "epoch": 11.928783382789318, | |
| "grad_norm": 0.0980016216635704, | |
| "learning_rate": 6.978149344295242e-05, | |
| "loss": 0.0073, | |
| "step": 4020 | |
| }, | |
| { | |
| "epoch": 11.958456973293769, | |
| "grad_norm": 0.12663109600543976, | |
| "learning_rate": 6.962952922749457e-05, | |
| "loss": 0.0098, | |
| "step": 4030 | |
| }, | |
| { | |
| "epoch": 11.988130563798219, | |
| "grad_norm": 0.08269482851028442, | |
| "learning_rate": 6.947735034665002e-05, | |
| "loss": 0.0097, | |
| "step": 4040 | |
| }, | |
| { | |
| "epoch": 12.01780415430267, | |
| "grad_norm": 0.12581780552864075, | |
| "learning_rate": 6.932495846462261e-05, | |
| "loss": 0.0074, | |
| "step": 4050 | |
| }, | |
| { | |
| "epoch": 12.047477744807122, | |
| "grad_norm": 0.13423371315002441, | |
| "learning_rate": 6.917235524794558e-05, | |
| "loss": 0.0116, | |
| "step": 4060 | |
| }, | |
| { | |
| "epoch": 12.077151335311573, | |
| "grad_norm": 0.08609340339899063, | |
| "learning_rate": 6.901954236546323e-05, | |
| "loss": 0.0092, | |
| "step": 4070 | |
| }, | |
| { | |
| "epoch": 12.106824925816024, | |
| "grad_norm": 0.11274320632219315, | |
| "learning_rate": 6.886652148831279e-05, | |
| "loss": 0.0074, | |
| "step": 4080 | |
| }, | |
| { | |
| "epoch": 12.136498516320474, | |
| "grad_norm": 0.09400057792663574, | |
| "learning_rate": 6.871329428990602e-05, | |
| "loss": 0.0078, | |
| "step": 4090 | |
| }, | |
| { | |
| "epoch": 12.166172106824925, | |
| "grad_norm": 0.11359213292598724, | |
| "learning_rate": 6.855986244591104e-05, | |
| "loss": 0.0101, | |
| "step": 4100 | |
| }, | |
| { | |
| "epoch": 12.195845697329377, | |
| "grad_norm": 0.14520014822483063, | |
| "learning_rate": 6.840622763423391e-05, | |
| "loss": 0.009, | |
| "step": 4110 | |
| }, | |
| { | |
| "epoch": 12.225519287833828, | |
| "grad_norm": 0.14667099714279175, | |
| "learning_rate": 6.825239153500029e-05, | |
| "loss": 0.0091, | |
| "step": 4120 | |
| }, | |
| { | |
| "epoch": 12.25519287833828, | |
| "grad_norm": 0.11558016389608383, | |
| "learning_rate": 6.809835583053715e-05, | |
| "loss": 0.0088, | |
| "step": 4130 | |
| }, | |
| { | |
| "epoch": 12.28486646884273, | |
| "grad_norm": 0.25004813075065613, | |
| "learning_rate": 6.794412220535426e-05, | |
| "loss": 0.0101, | |
| "step": 4140 | |
| }, | |
| { | |
| "epoch": 12.31454005934718, | |
| "grad_norm": 0.17387105524539948, | |
| "learning_rate": 6.778969234612584e-05, | |
| "loss": 0.0079, | |
| "step": 4150 | |
| }, | |
| { | |
| "epoch": 12.344213649851632, | |
| "grad_norm": 0.11986710131168365, | |
| "learning_rate": 6.763506794167208e-05, | |
| "loss": 0.0073, | |
| "step": 4160 | |
| }, | |
| { | |
| "epoch": 12.373887240356083, | |
| "grad_norm": 0.14098893105983734, | |
| "learning_rate": 6.748025068294067e-05, | |
| "loss": 0.0115, | |
| "step": 4170 | |
| }, | |
| { | |
| "epoch": 12.403560830860535, | |
| "grad_norm": 0.17298579216003418, | |
| "learning_rate": 6.732524226298841e-05, | |
| "loss": 0.0098, | |
| "step": 4180 | |
| }, | |
| { | |
| "epoch": 12.433234421364984, | |
| "grad_norm": 0.12529845535755157, | |
| "learning_rate": 6.71700443769625e-05, | |
| "loss": 0.0109, | |
| "step": 4190 | |
| }, | |
| { | |
| "epoch": 12.462908011869436, | |
| "grad_norm": 0.08396193385124207, | |
| "learning_rate": 6.701465872208216e-05, | |
| "loss": 0.007, | |
| "step": 4200 | |
| }, | |
| { | |
| "epoch": 12.492581602373887, | |
| "grad_norm": 0.08979377895593643, | |
| "learning_rate": 6.685908699762002e-05, | |
| "loss": 0.0063, | |
| "step": 4210 | |
| }, | |
| { | |
| "epoch": 12.522255192878339, | |
| "grad_norm": 0.09685163199901581, | |
| "learning_rate": 6.670333090488356e-05, | |
| "loss": 0.0068, | |
| "step": 4220 | |
| }, | |
| { | |
| "epoch": 12.55192878338279, | |
| "grad_norm": 0.07651125639677048, | |
| "learning_rate": 6.654739214719641e-05, | |
| "loss": 0.0067, | |
| "step": 4230 | |
| }, | |
| { | |
| "epoch": 12.58160237388724, | |
| "grad_norm": 0.10274438560009003, | |
| "learning_rate": 6.639127242987988e-05, | |
| "loss": 0.0092, | |
| "step": 4240 | |
| }, | |
| { | |
| "epoch": 12.611275964391691, | |
| "grad_norm": 0.18983325362205505, | |
| "learning_rate": 6.623497346023418e-05, | |
| "loss": 0.0094, | |
| "step": 4250 | |
| }, | |
| { | |
| "epoch": 12.640949554896142, | |
| "grad_norm": 0.09363807737827301, | |
| "learning_rate": 6.607849694751977e-05, | |
| "loss": 0.0109, | |
| "step": 4260 | |
| }, | |
| { | |
| "epoch": 12.670623145400594, | |
| "grad_norm": 0.10365147143602371, | |
| "learning_rate": 6.592184460293877e-05, | |
| "loss": 0.0087, | |
| "step": 4270 | |
| }, | |
| { | |
| "epoch": 12.700296735905045, | |
| "grad_norm": 0.08823329955339432, | |
| "learning_rate": 6.576501813961609e-05, | |
| "loss": 0.0075, | |
| "step": 4280 | |
| }, | |
| { | |
| "epoch": 12.729970326409495, | |
| "grad_norm": 0.1399281769990921, | |
| "learning_rate": 6.56080192725808e-05, | |
| "loss": 0.0073, | |
| "step": 4290 | |
| }, | |
| { | |
| "epoch": 12.759643916913946, | |
| "grad_norm": 0.10064969211816788, | |
| "learning_rate": 6.545084971874738e-05, | |
| "loss": 0.0077, | |
| "step": 4300 | |
| }, | |
| { | |
| "epoch": 12.789317507418398, | |
| "grad_norm": 0.12935389578342438, | |
| "learning_rate": 6.529351119689688e-05, | |
| "loss": 0.0063, | |
| "step": 4310 | |
| }, | |
| { | |
| "epoch": 12.818991097922849, | |
| "grad_norm": 0.0700489729642868, | |
| "learning_rate": 6.513600542765817e-05, | |
| "loss": 0.0085, | |
| "step": 4320 | |
| }, | |
| { | |
| "epoch": 12.8486646884273, | |
| "grad_norm": 0.13894468545913696, | |
| "learning_rate": 6.497833413348909e-05, | |
| "loss": 0.0067, | |
| "step": 4330 | |
| }, | |
| { | |
| "epoch": 12.87833827893175, | |
| "grad_norm": 0.13386406004428864, | |
| "learning_rate": 6.48204990386577e-05, | |
| "loss": 0.0078, | |
| "step": 4340 | |
| }, | |
| { | |
| "epoch": 12.908011869436201, | |
| "grad_norm": 0.18802672624588013, | |
| "learning_rate": 6.466250186922325e-05, | |
| "loss": 0.0072, | |
| "step": 4350 | |
| }, | |
| { | |
| "epoch": 12.937685459940653, | |
| "grad_norm": 0.18399575352668762, | |
| "learning_rate": 6.450434435301751e-05, | |
| "loss": 0.0064, | |
| "step": 4360 | |
| }, | |
| { | |
| "epoch": 12.967359050445104, | |
| "grad_norm": 0.16697198152542114, | |
| "learning_rate": 6.43460282196257e-05, | |
| "loss": 0.0074, | |
| "step": 4370 | |
| }, | |
| { | |
| "epoch": 12.997032640949556, | |
| "grad_norm": 0.16347278654575348, | |
| "learning_rate": 6.418755520036775e-05, | |
| "loss": 0.0071, | |
| "step": 4380 | |
| }, | |
| { | |
| "epoch": 13.026706231454005, | |
| "grad_norm": 0.08685827255249023, | |
| "learning_rate": 6.402892702827916e-05, | |
| "loss": 0.0106, | |
| "step": 4390 | |
| }, | |
| { | |
| "epoch": 13.056379821958457, | |
| "grad_norm": 0.17205415666103363, | |
| "learning_rate": 6.387014543809223e-05, | |
| "loss": 0.012, | |
| "step": 4400 | |
| }, | |
| { | |
| "epoch": 13.086053412462908, | |
| "grad_norm": 0.11940989643335342, | |
| "learning_rate": 6.371121216621698e-05, | |
| "loss": 0.0082, | |
| "step": 4410 | |
| }, | |
| { | |
| "epoch": 13.11572700296736, | |
| "grad_norm": 0.14548015594482422, | |
| "learning_rate": 6.355212895072223e-05, | |
| "loss": 0.0104, | |
| "step": 4420 | |
| }, | |
| { | |
| "epoch": 13.14540059347181, | |
| "grad_norm": 0.10984127968549728, | |
| "learning_rate": 6.339289753131649e-05, | |
| "loss": 0.0091, | |
| "step": 4430 | |
| }, | |
| { | |
| "epoch": 13.17507418397626, | |
| "grad_norm": 0.18613463640213013, | |
| "learning_rate": 6.323351964932908e-05, | |
| "loss": 0.0127, | |
| "step": 4440 | |
| }, | |
| { | |
| "epoch": 13.204747774480712, | |
| "grad_norm": 0.1218784749507904, | |
| "learning_rate": 6.307399704769099e-05, | |
| "loss": 0.0066, | |
| "step": 4450 | |
| }, | |
| { | |
| "epoch": 13.234421364985163, | |
| "grad_norm": 0.11912266910076141, | |
| "learning_rate": 6.291433147091583e-05, | |
| "loss": 0.0087, | |
| "step": 4460 | |
| }, | |
| { | |
| "epoch": 13.264094955489615, | |
| "grad_norm": 0.1327201873064041, | |
| "learning_rate": 6.275452466508077e-05, | |
| "loss": 0.0076, | |
| "step": 4470 | |
| }, | |
| { | |
| "epoch": 13.293768545994066, | |
| "grad_norm": 0.0954451709985733, | |
| "learning_rate": 6.259457837780742e-05, | |
| "loss": 0.0078, | |
| "step": 4480 | |
| }, | |
| { | |
| "epoch": 13.323442136498516, | |
| "grad_norm": 0.11177768558263779, | |
| "learning_rate": 6.243449435824276e-05, | |
| "loss": 0.009, | |
| "step": 4490 | |
| }, | |
| { | |
| "epoch": 13.353115727002967, | |
| "grad_norm": 0.11750178039073944, | |
| "learning_rate": 6.227427435703997e-05, | |
| "loss": 0.008, | |
| "step": 4500 | |
| }, | |
| { | |
| "epoch": 13.382789317507418, | |
| "grad_norm": 0.18307024240493774, | |
| "learning_rate": 6.211392012633932e-05, | |
| "loss": 0.0114, | |
| "step": 4510 | |
| }, | |
| { | |
| "epoch": 13.41246290801187, | |
| "grad_norm": 0.0931352823972702, | |
| "learning_rate": 6.195343341974899e-05, | |
| "loss": 0.0075, | |
| "step": 4520 | |
| }, | |
| { | |
| "epoch": 13.442136498516321, | |
| "grad_norm": 0.10080502182245255, | |
| "learning_rate": 6.179281599232591e-05, | |
| "loss": 0.0081, | |
| "step": 4530 | |
| }, | |
| { | |
| "epoch": 13.47181008902077, | |
| "grad_norm": 0.09596526622772217, | |
| "learning_rate": 6.163206960055651e-05, | |
| "loss": 0.0072, | |
| "step": 4540 | |
| }, | |
| { | |
| "epoch": 13.501483679525222, | |
| "grad_norm": 0.11159328371286392, | |
| "learning_rate": 6.147119600233758e-05, | |
| "loss": 0.007, | |
| "step": 4550 | |
| }, | |
| { | |
| "epoch": 13.531157270029674, | |
| "grad_norm": 0.10898558050394058, | |
| "learning_rate": 6.131019695695702e-05, | |
| "loss": 0.0059, | |
| "step": 4560 | |
| }, | |
| { | |
| "epoch": 13.560830860534125, | |
| "grad_norm": 0.11957506090402603, | |
| "learning_rate": 6.11490742250746e-05, | |
| "loss": 0.0082, | |
| "step": 4570 | |
| }, | |
| { | |
| "epoch": 13.590504451038576, | |
| "grad_norm": 0.15221808850765228, | |
| "learning_rate": 6.0987829568702656e-05, | |
| "loss": 0.0081, | |
| "step": 4580 | |
| }, | |
| { | |
| "epoch": 13.620178041543026, | |
| "grad_norm": 0.10994566231966019, | |
| "learning_rate": 6.0826464751186994e-05, | |
| "loss": 0.0072, | |
| "step": 4590 | |
| }, | |
| { | |
| "epoch": 13.649851632047477, | |
| "grad_norm": 0.12259654700756073, | |
| "learning_rate": 6.066498153718735e-05, | |
| "loss": 0.008, | |
| "step": 4600 | |
| }, | |
| { | |
| "epoch": 13.679525222551929, | |
| "grad_norm": 0.09881018102169037, | |
| "learning_rate": 6.05033816926583e-05, | |
| "loss": 0.0083, | |
| "step": 4610 | |
| }, | |
| { | |
| "epoch": 13.70919881305638, | |
| "grad_norm": 0.09556011855602264, | |
| "learning_rate": 6.034166698482984e-05, | |
| "loss": 0.0096, | |
| "step": 4620 | |
| }, | |
| { | |
| "epoch": 13.738872403560832, | |
| "grad_norm": 0.17756840586662292, | |
| "learning_rate": 6.017983918218812e-05, | |
| "loss": 0.0083, | |
| "step": 4630 | |
| }, | |
| { | |
| "epoch": 13.768545994065281, | |
| "grad_norm": 0.12028778344392776, | |
| "learning_rate": 6.001790005445607e-05, | |
| "loss": 0.0092, | |
| "step": 4640 | |
| }, | |
| { | |
| "epoch": 13.798219584569733, | |
| "grad_norm": 0.16217273473739624, | |
| "learning_rate": 5.985585137257401e-05, | |
| "loss": 0.0088, | |
| "step": 4650 | |
| }, | |
| { | |
| "epoch": 13.827893175074184, | |
| "grad_norm": 0.145747110247612, | |
| "learning_rate": 5.969369490868042e-05, | |
| "loss": 0.0138, | |
| "step": 4660 | |
| }, | |
| { | |
| "epoch": 13.857566765578635, | |
| "grad_norm": 0.16528791189193726, | |
| "learning_rate": 5.953143243609235e-05, | |
| "loss": 0.0082, | |
| "step": 4670 | |
| }, | |
| { | |
| "epoch": 13.887240356083087, | |
| "grad_norm": 0.08141251653432846, | |
| "learning_rate": 5.9369065729286245e-05, | |
| "loss": 0.0056, | |
| "step": 4680 | |
| }, | |
| { | |
| "epoch": 13.916913946587536, | |
| "grad_norm": 0.10877679288387299, | |
| "learning_rate": 5.9206596563878357e-05, | |
| "loss": 0.0098, | |
| "step": 4690 | |
| }, | |
| { | |
| "epoch": 13.946587537091988, | |
| "grad_norm": 0.10972292721271515, | |
| "learning_rate": 5.90440267166055e-05, | |
| "loss": 0.0113, | |
| "step": 4700 | |
| }, | |
| { | |
| "epoch": 13.97626112759644, | |
| "grad_norm": 0.11017335951328278, | |
| "learning_rate": 5.888135796530544e-05, | |
| "loss": 0.0072, | |
| "step": 4710 | |
| }, | |
| { | |
| "epoch": 14.00593471810089, | |
| "grad_norm": 0.1043027862906456, | |
| "learning_rate": 5.871859208889759e-05, | |
| "loss": 0.0082, | |
| "step": 4720 | |
| }, | |
| { | |
| "epoch": 14.035608308605342, | |
| "grad_norm": 0.09205296635627747, | |
| "learning_rate": 5.85557308673635e-05, | |
| "loss": 0.0084, | |
| "step": 4730 | |
| }, | |
| { | |
| "epoch": 14.065281899109792, | |
| "grad_norm": 0.08532586693763733, | |
| "learning_rate": 5.8392776081727385e-05, | |
| "loss": 0.0064, | |
| "step": 4740 | |
| }, | |
| { | |
| "epoch": 14.094955489614243, | |
| "grad_norm": 0.10191354155540466, | |
| "learning_rate": 5.8229729514036705e-05, | |
| "loss": 0.0077, | |
| "step": 4750 | |
| }, | |
| { | |
| "epoch": 14.124629080118694, | |
| "grad_norm": 0.10814640671014786, | |
| "learning_rate": 5.8066592947342555e-05, | |
| "loss": 0.0083, | |
| "step": 4760 | |
| }, | |
| { | |
| "epoch": 14.154302670623146, | |
| "grad_norm": 0.1646806001663208, | |
| "learning_rate": 5.7903368165680327e-05, | |
| "loss": 0.0083, | |
| "step": 4770 | |
| }, | |
| { | |
| "epoch": 14.183976261127597, | |
| "grad_norm": 0.11636976152658463, | |
| "learning_rate": 5.7740056954050084e-05, | |
| "loss": 0.0092, | |
| "step": 4780 | |
| }, | |
| { | |
| "epoch": 14.213649851632047, | |
| "grad_norm": 0.15811985731124878, | |
| "learning_rate": 5.757666109839702e-05, | |
| "loss": 0.0073, | |
| "step": 4790 | |
| }, | |
| { | |
| "epoch": 14.243323442136498, | |
| "grad_norm": 0.18782468140125275, | |
| "learning_rate": 5.74131823855921e-05, | |
| "loss": 0.0101, | |
| "step": 4800 | |
| }, | |
| { | |
| "epoch": 14.27299703264095, | |
| "grad_norm": 0.11287040263414383, | |
| "learning_rate": 5.72496226034123e-05, | |
| "loss": 0.0071, | |
| "step": 4810 | |
| }, | |
| { | |
| "epoch": 14.302670623145401, | |
| "grad_norm": 0.06242251396179199, | |
| "learning_rate": 5.7085983540521216e-05, | |
| "loss": 0.0055, | |
| "step": 4820 | |
| }, | |
| { | |
| "epoch": 14.332344213649852, | |
| "grad_norm": 0.11765594780445099, | |
| "learning_rate": 5.692226698644938e-05, | |
| "loss": 0.0092, | |
| "step": 4830 | |
| }, | |
| { | |
| "epoch": 14.362017804154302, | |
| "grad_norm": 0.11589968204498291, | |
| "learning_rate": 5.675847473157485e-05, | |
| "loss": 0.0147, | |
| "step": 4840 | |
| }, | |
| { | |
| "epoch": 14.391691394658753, | |
| "grad_norm": 0.08261799812316895, | |
| "learning_rate": 5.6594608567103456e-05, | |
| "loss": 0.0092, | |
| "step": 4850 | |
| }, | |
| { | |
| "epoch": 14.421364985163205, | |
| "grad_norm": 0.09985928237438202, | |
| "learning_rate": 5.6430670285049314e-05, | |
| "loss": 0.0085, | |
| "step": 4860 | |
| }, | |
| { | |
| "epoch": 14.451038575667656, | |
| "grad_norm": 0.07024219632148743, | |
| "learning_rate": 5.6266661678215216e-05, | |
| "loss": 0.0073, | |
| "step": 4870 | |
| }, | |
| { | |
| "epoch": 14.480712166172108, | |
| "grad_norm": 0.1330738216638565, | |
| "learning_rate": 5.6102584540173006e-05, | |
| "loss": 0.0107, | |
| "step": 4880 | |
| }, | |
| { | |
| "epoch": 14.510385756676557, | |
| "grad_norm": 0.1905350685119629, | |
| "learning_rate": 5.5938440665244006e-05, | |
| "loss": 0.0077, | |
| "step": 4890 | |
| }, | |
| { | |
| "epoch": 14.540059347181009, | |
| "grad_norm": 0.10635291785001755, | |
| "learning_rate": 5.577423184847932e-05, | |
| "loss": 0.008, | |
| "step": 4900 | |
| }, | |
| { | |
| "epoch": 14.56973293768546, | |
| "grad_norm": 0.09331542998552322, | |
| "learning_rate": 5.560995988564023e-05, | |
| "loss": 0.0072, | |
| "step": 4910 | |
| }, | |
| { | |
| "epoch": 14.599406528189911, | |
| "grad_norm": 0.07317591458559036, | |
| "learning_rate": 5.544562657317863e-05, | |
| "loss": 0.0062, | |
| "step": 4920 | |
| }, | |
| { | |
| "epoch": 14.629080118694361, | |
| "grad_norm": 0.1096150130033493, | |
| "learning_rate": 5.52812337082173e-05, | |
| "loss": 0.0084, | |
| "step": 4930 | |
| }, | |
| { | |
| "epoch": 14.658753709198812, | |
| "grad_norm": 0.08940622955560684, | |
| "learning_rate": 5.511678308853026e-05, | |
| "loss": 0.0068, | |
| "step": 4940 | |
| }, | |
| { | |
| "epoch": 14.688427299703264, | |
| "grad_norm": 0.09628520905971527, | |
| "learning_rate": 5.495227651252315e-05, | |
| "loss": 0.0078, | |
| "step": 4950 | |
| }, | |
| { | |
| "epoch": 14.718100890207715, | |
| "grad_norm": 0.08508996665477753, | |
| "learning_rate": 5.478771577921351e-05, | |
| "loss": 0.0055, | |
| "step": 4960 | |
| }, | |
| { | |
| "epoch": 14.747774480712167, | |
| "grad_norm": 0.15108130872249603, | |
| "learning_rate": 5.462310268821118e-05, | |
| "loss": 0.0111, | |
| "step": 4970 | |
| }, | |
| { | |
| "epoch": 14.777448071216618, | |
| "grad_norm": 0.12836958467960358, | |
| "learning_rate": 5.445843903969854e-05, | |
| "loss": 0.0111, | |
| "step": 4980 | |
| }, | |
| { | |
| "epoch": 14.807121661721068, | |
| "grad_norm": 0.1240241602063179, | |
| "learning_rate": 5.4293726634410855e-05, | |
| "loss": 0.007, | |
| "step": 4990 | |
| }, | |
| { | |
| "epoch": 14.836795252225519, | |
| "grad_norm": 0.1312558501958847, | |
| "learning_rate": 5.4128967273616625e-05, | |
| "loss": 0.0084, | |
| "step": 5000 | |
| }, | |
| { | |
| "epoch": 14.86646884272997, | |
| "grad_norm": 0.07452099025249481, | |
| "learning_rate": 5.396416275909779e-05, | |
| "loss": 0.0097, | |
| "step": 5010 | |
| }, | |
| { | |
| "epoch": 14.896142433234422, | |
| "grad_norm": 0.10130845010280609, | |
| "learning_rate": 5.379931489313016e-05, | |
| "loss": 0.0076, | |
| "step": 5020 | |
| }, | |
| { | |
| "epoch": 14.925816023738872, | |
| "grad_norm": 0.08438223600387573, | |
| "learning_rate": 5.363442547846356e-05, | |
| "loss": 0.0079, | |
| "step": 5030 | |
| }, | |
| { | |
| "epoch": 14.955489614243323, | |
| "grad_norm": 0.10516081005334854, | |
| "learning_rate": 5.3469496318302204e-05, | |
| "loss": 0.007, | |
| "step": 5040 | |
| }, | |
| { | |
| "epoch": 14.985163204747774, | |
| "grad_norm": 0.10331432521343231, | |
| "learning_rate": 5.330452921628497e-05, | |
| "loss": 0.008, | |
| "step": 5050 | |
| }, | |
| { | |
| "epoch": 15.014836795252226, | |
| "grad_norm": 0.12713058292865753, | |
| "learning_rate": 5.313952597646568e-05, | |
| "loss": 0.0079, | |
| "step": 5060 | |
| }, | |
| { | |
| "epoch": 15.044510385756677, | |
| "grad_norm": 0.10389821976423264, | |
| "learning_rate": 5.297448840329329e-05, | |
| "loss": 0.0049, | |
| "step": 5070 | |
| }, | |
| { | |
| "epoch": 15.074183976261127, | |
| "grad_norm": 0.08406785875558853, | |
| "learning_rate": 5.280941830159227e-05, | |
| "loss": 0.0077, | |
| "step": 5080 | |
| }, | |
| { | |
| "epoch": 15.103857566765578, | |
| "grad_norm": 0.09462954849004745, | |
| "learning_rate": 5.264431747654284e-05, | |
| "loss": 0.0078, | |
| "step": 5090 | |
| }, | |
| { | |
| "epoch": 15.13353115727003, | |
| "grad_norm": 0.08634629100561142, | |
| "learning_rate": 5.247918773366112e-05, | |
| "loss": 0.0091, | |
| "step": 5100 | |
| }, | |
| { | |
| "epoch": 15.163204747774481, | |
| "grad_norm": 0.08352985233068466, | |
| "learning_rate": 5.231403087877955e-05, | |
| "loss": 0.0072, | |
| "step": 5110 | |
| }, | |
| { | |
| "epoch": 15.192878338278932, | |
| "grad_norm": 0.10993140935897827, | |
| "learning_rate": 5.214884871802703e-05, | |
| "loss": 0.0116, | |
| "step": 5120 | |
| }, | |
| { | |
| "epoch": 15.222551928783382, | |
| "grad_norm": 0.1299051195383072, | |
| "learning_rate": 5.198364305780922e-05, | |
| "loss": 0.0058, | |
| "step": 5130 | |
| }, | |
| { | |
| "epoch": 15.252225519287833, | |
| "grad_norm": 0.13166804611682892, | |
| "learning_rate": 5.1818415704788725e-05, | |
| "loss": 0.0072, | |
| "step": 5140 | |
| }, | |
| { | |
| "epoch": 15.281899109792285, | |
| "grad_norm": 0.09685414284467697, | |
| "learning_rate": 5.165316846586541e-05, | |
| "loss": 0.0064, | |
| "step": 5150 | |
| }, | |
| { | |
| "epoch": 15.311572700296736, | |
| "grad_norm": 0.11090342700481415, | |
| "learning_rate": 5.148790314815663e-05, | |
| "loss": 0.0087, | |
| "step": 5160 | |
| }, | |
| { | |
| "epoch": 15.341246290801188, | |
| "grad_norm": 0.09668321162462234, | |
| "learning_rate": 5.132262155897739e-05, | |
| "loss": 0.005, | |
| "step": 5170 | |
| }, | |
| { | |
| "epoch": 15.370919881305637, | |
| "grad_norm": 0.10721191018819809, | |
| "learning_rate": 5.1157325505820694e-05, | |
| "loss": 0.0071, | |
| "step": 5180 | |
| }, | |
| { | |
| "epoch": 15.400593471810089, | |
| "grad_norm": 0.142580047249794, | |
| "learning_rate": 5.0992016796337686e-05, | |
| "loss": 0.0093, | |
| "step": 5190 | |
| }, | |
| { | |
| "epoch": 15.43026706231454, | |
| "grad_norm": 0.13017131388187408, | |
| "learning_rate": 5.0826697238317935e-05, | |
| "loss": 0.0099, | |
| "step": 5200 | |
| }, | |
| { | |
| "epoch": 15.459940652818991, | |
| "grad_norm": 0.16075848042964935, | |
| "learning_rate": 5.066136863966963e-05, | |
| "loss": 0.0076, | |
| "step": 5210 | |
| }, | |
| { | |
| "epoch": 15.489614243323443, | |
| "grad_norm": 0.08089852333068848, | |
| "learning_rate": 5.0496032808399815e-05, | |
| "loss": 0.0082, | |
| "step": 5220 | |
| }, | |
| { | |
| "epoch": 15.519287833827892, | |
| "grad_norm": 0.1011849120259285, | |
| "learning_rate": 5.033069155259471e-05, | |
| "loss": 0.0079, | |
| "step": 5230 | |
| }, | |
| { | |
| "epoch": 15.548961424332344, | |
| "grad_norm": 0.11635112762451172, | |
| "learning_rate": 5.016534668039976e-05, | |
| "loss": 0.0065, | |
| "step": 5240 | |
| }, | |
| { | |
| "epoch": 15.578635014836795, | |
| "grad_norm": 0.13171295821666718, | |
| "learning_rate": 5e-05, | |
| "loss": 0.0055, | |
| "step": 5250 | |
| }, | |
| { | |
| "epoch": 15.608308605341247, | |
| "grad_norm": 0.08494381606578827, | |
| "learning_rate": 4.9834653319600246e-05, | |
| "loss": 0.0085, | |
| "step": 5260 | |
| }, | |
| { | |
| "epoch": 15.637982195845698, | |
| "grad_norm": 0.10446976125240326, | |
| "learning_rate": 4.96693084474053e-05, | |
| "loss": 0.0078, | |
| "step": 5270 | |
| }, | |
| { | |
| "epoch": 15.667655786350148, | |
| "grad_norm": 0.0890231505036354, | |
| "learning_rate": 4.950396719160018e-05, | |
| "loss": 0.0074, | |
| "step": 5280 | |
| }, | |
| { | |
| "epoch": 15.697329376854599, | |
| "grad_norm": 0.12522277235984802, | |
| "learning_rate": 4.93386313603304e-05, | |
| "loss": 0.0078, | |
| "step": 5290 | |
| }, | |
| { | |
| "epoch": 15.72700296735905, | |
| "grad_norm": 0.1571093499660492, | |
| "learning_rate": 4.917330276168208e-05, | |
| "loss": 0.0074, | |
| "step": 5300 | |
| }, | |
| { | |
| "epoch": 15.756676557863502, | |
| "grad_norm": 0.062216538935899734, | |
| "learning_rate": 4.9007983203662326e-05, | |
| "loss": 0.0063, | |
| "step": 5310 | |
| }, | |
| { | |
| "epoch": 15.786350148367953, | |
| "grad_norm": 0.1346684992313385, | |
| "learning_rate": 4.884267449417931e-05, | |
| "loss": 0.0076, | |
| "step": 5320 | |
| }, | |
| { | |
| "epoch": 15.816023738872403, | |
| "grad_norm": 0.09010523557662964, | |
| "learning_rate": 4.867737844102261e-05, | |
| "loss": 0.0058, | |
| "step": 5330 | |
| }, | |
| { | |
| "epoch": 15.845697329376854, | |
| "grad_norm": 0.14437784254550934, | |
| "learning_rate": 4.851209685184338e-05, | |
| "loss": 0.0086, | |
| "step": 5340 | |
| }, | |
| { | |
| "epoch": 15.875370919881306, | |
| "grad_norm": 0.11848489195108414, | |
| "learning_rate": 4.834683153413459e-05, | |
| "loss": 0.0066, | |
| "step": 5350 | |
| }, | |
| { | |
| "epoch": 15.905044510385757, | |
| "grad_norm": 0.070223368704319, | |
| "learning_rate": 4.818158429521129e-05, | |
| "loss": 0.0076, | |
| "step": 5360 | |
| }, | |
| { | |
| "epoch": 15.934718100890208, | |
| "grad_norm": 0.14026039838790894, | |
| "learning_rate": 4.801635694219079e-05, | |
| "loss": 0.0088, | |
| "step": 5370 | |
| }, | |
| { | |
| "epoch": 15.964391691394658, | |
| "grad_norm": 0.10169534385204315, | |
| "learning_rate": 4.785115128197298e-05, | |
| "loss": 0.0071, | |
| "step": 5380 | |
| }, | |
| { | |
| "epoch": 15.99406528189911, | |
| "grad_norm": 0.07850392907857895, | |
| "learning_rate": 4.7685969121220456e-05, | |
| "loss": 0.006, | |
| "step": 5390 | |
| }, | |
| { | |
| "epoch": 16.023738872403563, | |
| "grad_norm": 0.10650818794965744, | |
| "learning_rate": 4.7520812266338885e-05, | |
| "loss": 0.007, | |
| "step": 5400 | |
| }, | |
| { | |
| "epoch": 16.05341246290801, | |
| "grad_norm": 0.12273446470499039, | |
| "learning_rate": 4.735568252345718e-05, | |
| "loss": 0.0071, | |
| "step": 5410 | |
| }, | |
| { | |
| "epoch": 16.083086053412462, | |
| "grad_norm": 0.08303070813417435, | |
| "learning_rate": 4.7190581698407725e-05, | |
| "loss": 0.0053, | |
| "step": 5420 | |
| }, | |
| { | |
| "epoch": 16.112759643916913, | |
| "grad_norm": 0.12177737057209015, | |
| "learning_rate": 4.702551159670672e-05, | |
| "loss": 0.0072, | |
| "step": 5430 | |
| }, | |
| { | |
| "epoch": 16.142433234421365, | |
| "grad_norm": 0.08380179852247238, | |
| "learning_rate": 4.6860474023534335e-05, | |
| "loss": 0.0071, | |
| "step": 5440 | |
| }, | |
| { | |
| "epoch": 16.172106824925816, | |
| "grad_norm": 0.18725629150867462, | |
| "learning_rate": 4.669547078371504e-05, | |
| "loss": 0.0109, | |
| "step": 5450 | |
| }, | |
| { | |
| "epoch": 16.201780415430267, | |
| "grad_norm": 0.07537223398685455, | |
| "learning_rate": 4.65305036816978e-05, | |
| "loss": 0.0063, | |
| "step": 5460 | |
| }, | |
| { | |
| "epoch": 16.23145400593472, | |
| "grad_norm": 0.11886244267225266, | |
| "learning_rate": 4.6365574521536445e-05, | |
| "loss": 0.0066, | |
| "step": 5470 | |
| }, | |
| { | |
| "epoch": 16.26112759643917, | |
| "grad_norm": 0.11239882558584213, | |
| "learning_rate": 4.620068510686985e-05, | |
| "loss": 0.0067, | |
| "step": 5480 | |
| }, | |
| { | |
| "epoch": 16.29080118694362, | |
| "grad_norm": 0.08789864927530289, | |
| "learning_rate": 4.60358372409022e-05, | |
| "loss": 0.005, | |
| "step": 5490 | |
| }, | |
| { | |
| "epoch": 16.320474777448073, | |
| "grad_norm": 0.1038408875465393, | |
| "learning_rate": 4.5871032726383386e-05, | |
| "loss": 0.0059, | |
| "step": 5500 | |
| }, | |
| { | |
| "epoch": 16.35014836795252, | |
| "grad_norm": 0.12280593812465668, | |
| "learning_rate": 4.570627336558915e-05, | |
| "loss": 0.0076, | |
| "step": 5510 | |
| }, | |
| { | |
| "epoch": 16.379821958456972, | |
| "grad_norm": 0.07442447543144226, | |
| "learning_rate": 4.554156096030149e-05, | |
| "loss": 0.0058, | |
| "step": 5520 | |
| }, | |
| { | |
| "epoch": 16.409495548961424, | |
| "grad_norm": 0.14733825623989105, | |
| "learning_rate": 4.537689731178883e-05, | |
| "loss": 0.0111, | |
| "step": 5530 | |
| }, | |
| { | |
| "epoch": 16.439169139465875, | |
| "grad_norm": 0.08889064192771912, | |
| "learning_rate": 4.5212284220786494e-05, | |
| "loss": 0.0066, | |
| "step": 5540 | |
| }, | |
| { | |
| "epoch": 16.468842729970326, | |
| "grad_norm": 0.10983613133430481, | |
| "learning_rate": 4.504772348747687e-05, | |
| "loss": 0.005, | |
| "step": 5550 | |
| }, | |
| { | |
| "epoch": 16.498516320474778, | |
| "grad_norm": 0.08122289180755615, | |
| "learning_rate": 4.488321691146975e-05, | |
| "loss": 0.0064, | |
| "step": 5560 | |
| }, | |
| { | |
| "epoch": 16.52818991097923, | |
| "grad_norm": 0.11093121021986008, | |
| "learning_rate": 4.471876629178273e-05, | |
| "loss": 0.0056, | |
| "step": 5570 | |
| }, | |
| { | |
| "epoch": 16.55786350148368, | |
| "grad_norm": 0.07384008914232254, | |
| "learning_rate": 4.4554373426821374e-05, | |
| "loss": 0.0066, | |
| "step": 5580 | |
| }, | |
| { | |
| "epoch": 16.587537091988132, | |
| "grad_norm": 0.0759691372513771, | |
| "learning_rate": 4.439004011435979e-05, | |
| "loss": 0.0067, | |
| "step": 5590 | |
| }, | |
| { | |
| "epoch": 16.617210682492583, | |
| "grad_norm": 0.12558524310588837, | |
| "learning_rate": 4.4225768151520694e-05, | |
| "loss": 0.0082, | |
| "step": 5600 | |
| }, | |
| { | |
| "epoch": 16.64688427299703, | |
| "grad_norm": 0.09405193477869034, | |
| "learning_rate": 4.406155933475599e-05, | |
| "loss": 0.0082, | |
| "step": 5610 | |
| }, | |
| { | |
| "epoch": 16.676557863501483, | |
| "grad_norm": 0.11722928285598755, | |
| "learning_rate": 4.3897415459827e-05, | |
| "loss": 0.0071, | |
| "step": 5620 | |
| }, | |
| { | |
| "epoch": 16.706231454005934, | |
| "grad_norm": 0.06153471767902374, | |
| "learning_rate": 4.373333832178478e-05, | |
| "loss": 0.0057, | |
| "step": 5630 | |
| }, | |
| { | |
| "epoch": 16.735905044510385, | |
| "grad_norm": 0.1048816367983818, | |
| "learning_rate": 4.3569329714950704e-05, | |
| "loss": 0.0105, | |
| "step": 5640 | |
| }, | |
| { | |
| "epoch": 16.765578635014837, | |
| "grad_norm": 0.08989928662776947, | |
| "learning_rate": 4.3405391432896555e-05, | |
| "loss": 0.0045, | |
| "step": 5650 | |
| }, | |
| { | |
| "epoch": 16.795252225519288, | |
| "grad_norm": 0.10757238417863846, | |
| "learning_rate": 4.324152526842517e-05, | |
| "loss": 0.0067, | |
| "step": 5660 | |
| }, | |
| { | |
| "epoch": 16.82492581602374, | |
| "grad_norm": 0.1232636570930481, | |
| "learning_rate": 4.307773301355062e-05, | |
| "loss": 0.0064, | |
| "step": 5670 | |
| }, | |
| { | |
| "epoch": 16.85459940652819, | |
| "grad_norm": 0.11661067605018616, | |
| "learning_rate": 4.291401645947879e-05, | |
| "loss": 0.0066, | |
| "step": 5680 | |
| }, | |
| { | |
| "epoch": 16.884272997032642, | |
| "grad_norm": 0.128203347325325, | |
| "learning_rate": 4.275037739658771e-05, | |
| "loss": 0.0089, | |
| "step": 5690 | |
| }, | |
| { | |
| "epoch": 16.91394658753709, | |
| "grad_norm": 0.05867573991417885, | |
| "learning_rate": 4.2586817614407895e-05, | |
| "loss": 0.0048, | |
| "step": 5700 | |
| }, | |
| { | |
| "epoch": 16.94362017804154, | |
| "grad_norm": 0.10739487409591675, | |
| "learning_rate": 4.2423338901602985e-05, | |
| "loss": 0.0082, | |
| "step": 5710 | |
| }, | |
| { | |
| "epoch": 16.973293768545993, | |
| "grad_norm": 0.15257547795772552, | |
| "learning_rate": 4.2259943045949934e-05, | |
| "loss": 0.0099, | |
| "step": 5720 | |
| }, | |
| { | |
| "epoch": 17.002967359050444, | |
| "grad_norm": 0.11332064867019653, | |
| "learning_rate": 4.209663183431969e-05, | |
| "loss": 0.0091, | |
| "step": 5730 | |
| }, | |
| { | |
| "epoch": 17.032640949554896, | |
| "grad_norm": 0.10195380449295044, | |
| "learning_rate": 4.1933407052657456e-05, | |
| "loss": 0.0064, | |
| "step": 5740 | |
| }, | |
| { | |
| "epoch": 17.062314540059347, | |
| "grad_norm": 0.09771795570850372, | |
| "learning_rate": 4.17702704859633e-05, | |
| "loss": 0.006, | |
| "step": 5750 | |
| }, | |
| { | |
| "epoch": 17.0919881305638, | |
| "grad_norm": 0.067668117582798, | |
| "learning_rate": 4.160722391827262e-05, | |
| "loss": 0.0057, | |
| "step": 5760 | |
| }, | |
| { | |
| "epoch": 17.12166172106825, | |
| "grad_norm": 0.10733453184366226, | |
| "learning_rate": 4.14442691326365e-05, | |
| "loss": 0.0069, | |
| "step": 5770 | |
| }, | |
| { | |
| "epoch": 17.1513353115727, | |
| "grad_norm": 0.08877403289079666, | |
| "learning_rate": 4.1281407911102425e-05, | |
| "loss": 0.0076, | |
| "step": 5780 | |
| }, | |
| { | |
| "epoch": 17.181008902077153, | |
| "grad_norm": 0.09970077127218246, | |
| "learning_rate": 4.111864203469457e-05, | |
| "loss": 0.0081, | |
| "step": 5790 | |
| }, | |
| { | |
| "epoch": 17.2106824925816, | |
| "grad_norm": 0.08427341282367706, | |
| "learning_rate": 4.095597328339452e-05, | |
| "loss": 0.0063, | |
| "step": 5800 | |
| }, | |
| { | |
| "epoch": 17.240356083086052, | |
| "grad_norm": 0.08047015219926834, | |
| "learning_rate": 4.079340343612165e-05, | |
| "loss": 0.0057, | |
| "step": 5810 | |
| }, | |
| { | |
| "epoch": 17.270029673590503, | |
| "grad_norm": 0.09922623634338379, | |
| "learning_rate": 4.063093427071376e-05, | |
| "loss": 0.0057, | |
| "step": 5820 | |
| }, | |
| { | |
| "epoch": 17.299703264094955, | |
| "grad_norm": 0.16388913989067078, | |
| "learning_rate": 4.046856756390767e-05, | |
| "loss": 0.0089, | |
| "step": 5830 | |
| }, | |
| { | |
| "epoch": 17.329376854599406, | |
| "grad_norm": 0.14212121069431305, | |
| "learning_rate": 4.0306305091319595e-05, | |
| "loss": 0.0068, | |
| "step": 5840 | |
| }, | |
| { | |
| "epoch": 17.359050445103858, | |
| "grad_norm": 0.09012001752853394, | |
| "learning_rate": 4.0144148627425993e-05, | |
| "loss": 0.0119, | |
| "step": 5850 | |
| }, | |
| { | |
| "epoch": 17.38872403560831, | |
| "grad_norm": 0.14375264942646027, | |
| "learning_rate": 3.9982099945543945e-05, | |
| "loss": 0.007, | |
| "step": 5860 | |
| }, | |
| { | |
| "epoch": 17.41839762611276, | |
| "grad_norm": 0.0589725561439991, | |
| "learning_rate": 3.982016081781189e-05, | |
| "loss": 0.0058, | |
| "step": 5870 | |
| }, | |
| { | |
| "epoch": 17.448071216617212, | |
| "grad_norm": 0.13382118940353394, | |
| "learning_rate": 3.965833301517017e-05, | |
| "loss": 0.0109, | |
| "step": 5880 | |
| }, | |
| { | |
| "epoch": 17.477744807121663, | |
| "grad_norm": 0.11577240377664566, | |
| "learning_rate": 3.949661830734172e-05, | |
| "loss": 0.0059, | |
| "step": 5890 | |
| }, | |
| { | |
| "epoch": 17.50741839762611, | |
| "grad_norm": 0.11332771927118301, | |
| "learning_rate": 3.933501846281267e-05, | |
| "loss": 0.0081, | |
| "step": 5900 | |
| }, | |
| { | |
| "epoch": 17.537091988130562, | |
| "grad_norm": 0.06491021066904068, | |
| "learning_rate": 3.917353524881302e-05, | |
| "loss": 0.0069, | |
| "step": 5910 | |
| }, | |
| { | |
| "epoch": 17.566765578635014, | |
| "grad_norm": 0.06087639555335045, | |
| "learning_rate": 3.901217043129735e-05, | |
| "loss": 0.0075, | |
| "step": 5920 | |
| }, | |
| { | |
| "epoch": 17.596439169139465, | |
| "grad_norm": 0.09394536167383194, | |
| "learning_rate": 3.8850925774925425e-05, | |
| "loss": 0.0052, | |
| "step": 5930 | |
| }, | |
| { | |
| "epoch": 17.626112759643917, | |
| "grad_norm": 0.07485074549913406, | |
| "learning_rate": 3.8689803043043e-05, | |
| "loss": 0.0066, | |
| "step": 5940 | |
| }, | |
| { | |
| "epoch": 17.655786350148368, | |
| "grad_norm": 0.09653795510530472, | |
| "learning_rate": 3.852880399766243e-05, | |
| "loss": 0.0056, | |
| "step": 5950 | |
| }, | |
| { | |
| "epoch": 17.68545994065282, | |
| "grad_norm": 0.08485693484544754, | |
| "learning_rate": 3.836793039944349e-05, | |
| "loss": 0.0052, | |
| "step": 5960 | |
| }, | |
| { | |
| "epoch": 17.71513353115727, | |
| "grad_norm": 0.060783516615629196, | |
| "learning_rate": 3.820718400767409e-05, | |
| "loss": 0.005, | |
| "step": 5970 | |
| }, | |
| { | |
| "epoch": 17.744807121661722, | |
| "grad_norm": 0.13937251269817352, | |
| "learning_rate": 3.8046566580251e-05, | |
| "loss": 0.0073, | |
| "step": 5980 | |
| }, | |
| { | |
| "epoch": 17.774480712166174, | |
| "grad_norm": 0.10680582374334335, | |
| "learning_rate": 3.788607987366069e-05, | |
| "loss": 0.0062, | |
| "step": 5990 | |
| }, | |
| { | |
| "epoch": 17.80415430267062, | |
| "grad_norm": 0.14509576559066772, | |
| "learning_rate": 3.772572564296005e-05, | |
| "loss": 0.0063, | |
| "step": 6000 | |
| }, | |
| { | |
| "epoch": 17.833827893175073, | |
| "grad_norm": 0.09072288125753403, | |
| "learning_rate": 3.756550564175727e-05, | |
| "loss": 0.0051, | |
| "step": 6010 | |
| }, | |
| { | |
| "epoch": 17.863501483679524, | |
| "grad_norm": 0.06139769405126572, | |
| "learning_rate": 3.74054216221926e-05, | |
| "loss": 0.0061, | |
| "step": 6020 | |
| }, | |
| { | |
| "epoch": 17.893175074183976, | |
| "grad_norm": 0.07538589090108871, | |
| "learning_rate": 3.7245475334919246e-05, | |
| "loss": 0.0047, | |
| "step": 6030 | |
| }, | |
| { | |
| "epoch": 17.922848664688427, | |
| "grad_norm": 0.03345522657036781, | |
| "learning_rate": 3.7085668529084184e-05, | |
| "loss": 0.008, | |
| "step": 6040 | |
| }, | |
| { | |
| "epoch": 17.95252225519288, | |
| "grad_norm": 0.0697244256734848, | |
| "learning_rate": 3.6926002952309016e-05, | |
| "loss": 0.0038, | |
| "step": 6050 | |
| }, | |
| { | |
| "epoch": 17.98219584569733, | |
| "grad_norm": 0.06793000549077988, | |
| "learning_rate": 3.676648035067093e-05, | |
| "loss": 0.0059, | |
| "step": 6060 | |
| }, | |
| { | |
| "epoch": 18.01186943620178, | |
| "grad_norm": 0.0548509806394577, | |
| "learning_rate": 3.6607102468683526e-05, | |
| "loss": 0.0053, | |
| "step": 6070 | |
| }, | |
| { | |
| "epoch": 18.041543026706233, | |
| "grad_norm": 0.0740605965256691, | |
| "learning_rate": 3.6447871049277796e-05, | |
| "loss": 0.0074, | |
| "step": 6080 | |
| }, | |
| { | |
| "epoch": 18.071216617210684, | |
| "grad_norm": 0.04433996602892876, | |
| "learning_rate": 3.628878783378302e-05, | |
| "loss": 0.0057, | |
| "step": 6090 | |
| }, | |
| { | |
| "epoch": 18.100890207715132, | |
| "grad_norm": 0.10380228608846664, | |
| "learning_rate": 3.612985456190778e-05, | |
| "loss": 0.0048, | |
| "step": 6100 | |
| }, | |
| { | |
| "epoch": 18.130563798219583, | |
| "grad_norm": 0.04870785027742386, | |
| "learning_rate": 3.597107297172084e-05, | |
| "loss": 0.0045, | |
| "step": 6110 | |
| }, | |
| { | |
| "epoch": 18.160237388724035, | |
| "grad_norm": 0.08207621425390244, | |
| "learning_rate": 3.581244479963225e-05, | |
| "loss": 0.0069, | |
| "step": 6120 | |
| }, | |
| { | |
| "epoch": 18.189910979228486, | |
| "grad_norm": 0.07084132730960846, | |
| "learning_rate": 3.5653971780374295e-05, | |
| "loss": 0.0063, | |
| "step": 6130 | |
| }, | |
| { | |
| "epoch": 18.219584569732937, | |
| "grad_norm": 0.06766065955162048, | |
| "learning_rate": 3.5495655646982505e-05, | |
| "loss": 0.0047, | |
| "step": 6140 | |
| }, | |
| { | |
| "epoch": 18.24925816023739, | |
| "grad_norm": 0.07428411394357681, | |
| "learning_rate": 3.533749813077677e-05, | |
| "loss": 0.0048, | |
| "step": 6150 | |
| }, | |
| { | |
| "epoch": 18.27893175074184, | |
| "grad_norm": 0.06863471865653992, | |
| "learning_rate": 3.517950096134232e-05, | |
| "loss": 0.0078, | |
| "step": 6160 | |
| }, | |
| { | |
| "epoch": 18.30860534124629, | |
| "grad_norm": 0.07877454161643982, | |
| "learning_rate": 3.5021665866510925e-05, | |
| "loss": 0.0066, | |
| "step": 6170 | |
| }, | |
| { | |
| "epoch": 18.338278931750743, | |
| "grad_norm": 0.10128459334373474, | |
| "learning_rate": 3.4863994572341843e-05, | |
| "loss": 0.007, | |
| "step": 6180 | |
| }, | |
| { | |
| "epoch": 18.367952522255194, | |
| "grad_norm": 0.1272270679473877, | |
| "learning_rate": 3.470648880310313e-05, | |
| "loss": 0.0076, | |
| "step": 6190 | |
| }, | |
| { | |
| "epoch": 18.397626112759642, | |
| "grad_norm": 0.08308117091655731, | |
| "learning_rate": 3.4549150281252636e-05, | |
| "loss": 0.0059, | |
| "step": 6200 | |
| }, | |
| { | |
| "epoch": 18.427299703264094, | |
| "grad_norm": 0.12493477761745453, | |
| "learning_rate": 3.439198072741921e-05, | |
| "loss": 0.0062, | |
| "step": 6210 | |
| }, | |
| { | |
| "epoch": 18.456973293768545, | |
| "grad_norm": 0.08306564390659332, | |
| "learning_rate": 3.423498186038393e-05, | |
| "loss": 0.0062, | |
| "step": 6220 | |
| }, | |
| { | |
| "epoch": 18.486646884272997, | |
| "grad_norm": 0.08139030635356903, | |
| "learning_rate": 3.407815539706124e-05, | |
| "loss": 0.006, | |
| "step": 6230 | |
| }, | |
| { | |
| "epoch": 18.516320474777448, | |
| "grad_norm": 0.1288260817527771, | |
| "learning_rate": 3.392150305248024e-05, | |
| "loss": 0.0057, | |
| "step": 6240 | |
| }, | |
| { | |
| "epoch": 18.5459940652819, | |
| "grad_norm": 0.13427333533763885, | |
| "learning_rate": 3.3765026539765834e-05, | |
| "loss": 0.0083, | |
| "step": 6250 | |
| }, | |
| { | |
| "epoch": 18.57566765578635, | |
| "grad_norm": 0.07845230400562286, | |
| "learning_rate": 3.360872757012011e-05, | |
| "loss": 0.0084, | |
| "step": 6260 | |
| }, | |
| { | |
| "epoch": 18.605341246290802, | |
| "grad_norm": 0.07737909257411957, | |
| "learning_rate": 3.3452607852803584e-05, | |
| "loss": 0.0056, | |
| "step": 6270 | |
| }, | |
| { | |
| "epoch": 18.635014836795254, | |
| "grad_norm": 0.12954828143119812, | |
| "learning_rate": 3.329666909511645e-05, | |
| "loss": 0.0067, | |
| "step": 6280 | |
| }, | |
| { | |
| "epoch": 18.664688427299705, | |
| "grad_norm": 0.07920852303504944, | |
| "learning_rate": 3.3140913002379995e-05, | |
| "loss": 0.0082, | |
| "step": 6290 | |
| }, | |
| { | |
| "epoch": 18.694362017804153, | |
| "grad_norm": 0.08958853781223297, | |
| "learning_rate": 3.298534127791785e-05, | |
| "loss": 0.0051, | |
| "step": 6300 | |
| }, | |
| { | |
| "epoch": 18.724035608308604, | |
| "grad_norm": 0.07667780667543411, | |
| "learning_rate": 3.282995562303754e-05, | |
| "loss": 0.0062, | |
| "step": 6310 | |
| }, | |
| { | |
| "epoch": 18.753709198813056, | |
| "grad_norm": 0.06684096157550812, | |
| "learning_rate": 3.267475773701161e-05, | |
| "loss": 0.0047, | |
| "step": 6320 | |
| }, | |
| { | |
| "epoch": 18.783382789317507, | |
| "grad_norm": 0.06355752050876617, | |
| "learning_rate": 3.251974931705933e-05, | |
| "loss": 0.005, | |
| "step": 6330 | |
| }, | |
| { | |
| "epoch": 18.81305637982196, | |
| "grad_norm": 0.07288695126771927, | |
| "learning_rate": 3.236493205832795e-05, | |
| "loss": 0.0057, | |
| "step": 6340 | |
| }, | |
| { | |
| "epoch": 18.84272997032641, | |
| "grad_norm": 0.06220920756459236, | |
| "learning_rate": 3.221030765387417e-05, | |
| "loss": 0.0045, | |
| "step": 6350 | |
| }, | |
| { | |
| "epoch": 18.87240356083086, | |
| "grad_norm": 0.045754965394735336, | |
| "learning_rate": 3.205587779464576e-05, | |
| "loss": 0.0058, | |
| "step": 6360 | |
| }, | |
| { | |
| "epoch": 18.902077151335313, | |
| "grad_norm": 0.08345388621091843, | |
| "learning_rate": 3.190164416946285e-05, | |
| "loss": 0.0072, | |
| "step": 6370 | |
| }, | |
| { | |
| "epoch": 18.931750741839764, | |
| "grad_norm": 0.08965882658958435, | |
| "learning_rate": 3.1747608464999725e-05, | |
| "loss": 0.0054, | |
| "step": 6380 | |
| }, | |
| { | |
| "epoch": 18.961424332344215, | |
| "grad_norm": 0.09003588557243347, | |
| "learning_rate": 3.1593772365766105e-05, | |
| "loss": 0.0064, | |
| "step": 6390 | |
| }, | |
| { | |
| "epoch": 18.991097922848663, | |
| "grad_norm": 0.09479653835296631, | |
| "learning_rate": 3.144013755408895e-05, | |
| "loss": 0.0056, | |
| "step": 6400 | |
| }, | |
| { | |
| "epoch": 19.020771513353115, | |
| "grad_norm": 0.08929464966058731, | |
| "learning_rate": 3.128670571009399e-05, | |
| "loss": 0.0061, | |
| "step": 6410 | |
| }, | |
| { | |
| "epoch": 19.050445103857566, | |
| "grad_norm": 0.08314875513315201, | |
| "learning_rate": 3.113347851168721e-05, | |
| "loss": 0.0063, | |
| "step": 6420 | |
| }, | |
| { | |
| "epoch": 19.080118694362017, | |
| "grad_norm": 0.07534530013799667, | |
| "learning_rate": 3.098045763453678e-05, | |
| "loss": 0.0043, | |
| "step": 6430 | |
| }, | |
| { | |
| "epoch": 19.10979228486647, | |
| "grad_norm": 0.0726943388581276, | |
| "learning_rate": 3.082764475205442e-05, | |
| "loss": 0.0053, | |
| "step": 6440 | |
| }, | |
| { | |
| "epoch": 19.13946587537092, | |
| "grad_norm": 0.06237198784947395, | |
| "learning_rate": 3.0675041535377405e-05, | |
| "loss": 0.0039, | |
| "step": 6450 | |
| }, | |
| { | |
| "epoch": 19.16913946587537, | |
| "grad_norm": 0.05938175693154335, | |
| "learning_rate": 3.052264965335e-05, | |
| "loss": 0.0051, | |
| "step": 6460 | |
| }, | |
| { | |
| "epoch": 19.198813056379823, | |
| "grad_norm": 0.1082092896103859, | |
| "learning_rate": 3.0370470772505433e-05, | |
| "loss": 0.0065, | |
| "step": 6470 | |
| }, | |
| { | |
| "epoch": 19.228486646884274, | |
| "grad_norm": 0.1327633559703827, | |
| "learning_rate": 3.0218506557047598e-05, | |
| "loss": 0.0055, | |
| "step": 6480 | |
| }, | |
| { | |
| "epoch": 19.258160237388726, | |
| "grad_norm": 0.07017289102077484, | |
| "learning_rate": 3.006675866883275e-05, | |
| "loss": 0.0051, | |
| "step": 6490 | |
| }, | |
| { | |
| "epoch": 19.287833827893174, | |
| "grad_norm": 0.10289981216192245, | |
| "learning_rate": 2.991522876735154e-05, | |
| "loss": 0.0049, | |
| "step": 6500 | |
| }, | |
| { | |
| "epoch": 19.317507418397625, | |
| "grad_norm": 0.1318645030260086, | |
| "learning_rate": 2.976391850971065e-05, | |
| "loss": 0.0067, | |
| "step": 6510 | |
| }, | |
| { | |
| "epoch": 19.347181008902076, | |
| "grad_norm": 0.10172990709543228, | |
| "learning_rate": 2.9612829550614836e-05, | |
| "loss": 0.0088, | |
| "step": 6520 | |
| }, | |
| { | |
| "epoch": 19.376854599406528, | |
| "grad_norm": 0.13238836824893951, | |
| "learning_rate": 2.9461963542348737e-05, | |
| "loss": 0.0061, | |
| "step": 6530 | |
| }, | |
| { | |
| "epoch": 19.40652818991098, | |
| "grad_norm": 0.05801309645175934, | |
| "learning_rate": 2.931132213475884e-05, | |
| "loss": 0.0064, | |
| "step": 6540 | |
| }, | |
| { | |
| "epoch": 19.43620178041543, | |
| "grad_norm": 0.10104802250862122, | |
| "learning_rate": 2.916090697523549e-05, | |
| "loss": 0.0058, | |
| "step": 6550 | |
| }, | |
| { | |
| "epoch": 19.465875370919882, | |
| "grad_norm": 0.07685786485671997, | |
| "learning_rate": 2.9010719708694722e-05, | |
| "loss": 0.0061, | |
| "step": 6560 | |
| }, | |
| { | |
| "epoch": 19.495548961424333, | |
| "grad_norm": 0.12206763029098511, | |
| "learning_rate": 2.8860761977560436e-05, | |
| "loss": 0.0091, | |
| "step": 6570 | |
| }, | |
| { | |
| "epoch": 19.525222551928785, | |
| "grad_norm": 0.08354529738426208, | |
| "learning_rate": 2.8711035421746367e-05, | |
| "loss": 0.0095, | |
| "step": 6580 | |
| }, | |
| { | |
| "epoch": 19.554896142433236, | |
| "grad_norm": 0.14100633561611176, | |
| "learning_rate": 2.8561541678638142e-05, | |
| "loss": 0.0062, | |
| "step": 6590 | |
| }, | |
| { | |
| "epoch": 19.584569732937684, | |
| "grad_norm": 0.08803468197584152, | |
| "learning_rate": 2.8412282383075363e-05, | |
| "loss": 0.0057, | |
| "step": 6600 | |
| }, | |
| { | |
| "epoch": 19.614243323442135, | |
| "grad_norm": 0.05870945379137993, | |
| "learning_rate": 2.8263259167333777e-05, | |
| "loss": 0.0051, | |
| "step": 6610 | |
| }, | |
| { | |
| "epoch": 19.643916913946587, | |
| "grad_norm": 0.09367851912975311, | |
| "learning_rate": 2.811447366110741e-05, | |
| "loss": 0.0056, | |
| "step": 6620 | |
| }, | |
| { | |
| "epoch": 19.673590504451038, | |
| "grad_norm": 0.1148865818977356, | |
| "learning_rate": 2.7965927491490705e-05, | |
| "loss": 0.0074, | |
| "step": 6630 | |
| }, | |
| { | |
| "epoch": 19.70326409495549, | |
| "grad_norm": 0.06443727761507034, | |
| "learning_rate": 2.7817622282960815e-05, | |
| "loss": 0.0057, | |
| "step": 6640 | |
| }, | |
| { | |
| "epoch": 19.73293768545994, | |
| "grad_norm": 0.06887785345315933, | |
| "learning_rate": 2.766955965735968e-05, | |
| "loss": 0.0046, | |
| "step": 6650 | |
| }, | |
| { | |
| "epoch": 19.762611275964392, | |
| "grad_norm": 0.09389995038509369, | |
| "learning_rate": 2.7521741233876496e-05, | |
| "loss": 0.0055, | |
| "step": 6660 | |
| }, | |
| { | |
| "epoch": 19.792284866468844, | |
| "grad_norm": 0.1363907903432846, | |
| "learning_rate": 2.7374168629029813e-05, | |
| "loss": 0.0053, | |
| "step": 6670 | |
| }, | |
| { | |
| "epoch": 19.821958456973295, | |
| "grad_norm": 0.08125721663236618, | |
| "learning_rate": 2.7226843456650037e-05, | |
| "loss": 0.0063, | |
| "step": 6680 | |
| }, | |
| { | |
| "epoch": 19.851632047477743, | |
| "grad_norm": 0.10988637804985046, | |
| "learning_rate": 2.707976732786166e-05, | |
| "loss": 0.0055, | |
| "step": 6690 | |
| }, | |
| { | |
| "epoch": 19.881305637982194, | |
| "grad_norm": 0.11077634245157242, | |
| "learning_rate": 2.693294185106562e-05, | |
| "loss": 0.0051, | |
| "step": 6700 | |
| }, | |
| { | |
| "epoch": 19.910979228486646, | |
| "grad_norm": 0.13356263935565948, | |
| "learning_rate": 2.6786368631921836e-05, | |
| "loss": 0.0053, | |
| "step": 6710 | |
| }, | |
| { | |
| "epoch": 19.940652818991097, | |
| "grad_norm": 0.133230060338974, | |
| "learning_rate": 2.6640049273331515e-05, | |
| "loss": 0.0056, | |
| "step": 6720 | |
| }, | |
| { | |
| "epoch": 19.97032640949555, | |
| "grad_norm": 0.10850989818572998, | |
| "learning_rate": 2.6493985375419778e-05, | |
| "loss": 0.0065, | |
| "step": 6730 | |
| }, | |
| { | |
| "epoch": 20.0, | |
| "grad_norm": 0.08912710100412369, | |
| "learning_rate": 2.6348178535517966e-05, | |
| "loss": 0.007, | |
| "step": 6740 | |
| }, | |
| { | |
| "epoch": 20.02967359050445, | |
| "grad_norm": 0.054864369332790375, | |
| "learning_rate": 2.6202630348146324e-05, | |
| "loss": 0.0049, | |
| "step": 6750 | |
| }, | |
| { | |
| "epoch": 20.059347181008903, | |
| "grad_norm": 0.11972086131572723, | |
| "learning_rate": 2.6057342404996522e-05, | |
| "loss": 0.0052, | |
| "step": 6760 | |
| }, | |
| { | |
| "epoch": 20.089020771513354, | |
| "grad_norm": 0.0904167965054512, | |
| "learning_rate": 2.591231629491423e-05, | |
| "loss": 0.004, | |
| "step": 6770 | |
| }, | |
| { | |
| "epoch": 20.118694362017806, | |
| "grad_norm": 0.04706908389925957, | |
| "learning_rate": 2.5767553603881767e-05, | |
| "loss": 0.0047, | |
| "step": 6780 | |
| }, | |
| { | |
| "epoch": 20.148367952522253, | |
| "grad_norm": 0.044308267533779144, | |
| "learning_rate": 2.562305591500069e-05, | |
| "loss": 0.0047, | |
| "step": 6790 | |
| }, | |
| { | |
| "epoch": 20.178041543026705, | |
| "grad_norm": 0.16575434803962708, | |
| "learning_rate": 2.547882480847461e-05, | |
| "loss": 0.0084, | |
| "step": 6800 | |
| }, | |
| { | |
| "epoch": 20.207715133531156, | |
| "grad_norm": 0.08881547302007675, | |
| "learning_rate": 2.5334861861591753e-05, | |
| "loss": 0.0039, | |
| "step": 6810 | |
| }, | |
| { | |
| "epoch": 20.237388724035608, | |
| "grad_norm": 0.08442889899015427, | |
| "learning_rate": 2.5191168648707887e-05, | |
| "loss": 0.0048, | |
| "step": 6820 | |
| }, | |
| { | |
| "epoch": 20.26706231454006, | |
| "grad_norm": 0.05308566987514496, | |
| "learning_rate": 2.5047746741228978e-05, | |
| "loss": 0.0043, | |
| "step": 6830 | |
| }, | |
| { | |
| "epoch": 20.29673590504451, | |
| "grad_norm": 0.08365131169557571, | |
| "learning_rate": 2.490459770759398e-05, | |
| "loss": 0.005, | |
| "step": 6840 | |
| }, | |
| { | |
| "epoch": 20.326409495548962, | |
| "grad_norm": 0.035201624035835266, | |
| "learning_rate": 2.476172311325783e-05, | |
| "loss": 0.004, | |
| "step": 6850 | |
| }, | |
| { | |
| "epoch": 20.356083086053413, | |
| "grad_norm": 0.05819859728217125, | |
| "learning_rate": 2.4619124520674146e-05, | |
| "loss": 0.0063, | |
| "step": 6860 | |
| }, | |
| { | |
| "epoch": 20.385756676557865, | |
| "grad_norm": 0.12480951100587845, | |
| "learning_rate": 2.447680348927837e-05, | |
| "loss": 0.0077, | |
| "step": 6870 | |
| }, | |
| { | |
| "epoch": 20.415430267062316, | |
| "grad_norm": 0.09439083188772202, | |
| "learning_rate": 2.433476157547044e-05, | |
| "loss": 0.0084, | |
| "step": 6880 | |
| }, | |
| { | |
| "epoch": 20.445103857566764, | |
| "grad_norm": 0.06064935028553009, | |
| "learning_rate": 2.419300033259798e-05, | |
| "loss": 0.006, | |
| "step": 6890 | |
| }, | |
| { | |
| "epoch": 20.474777448071215, | |
| "grad_norm": 0.062327805906534195, | |
| "learning_rate": 2.405152131093926e-05, | |
| "loss": 0.005, | |
| "step": 6900 | |
| }, | |
| { | |
| "epoch": 20.504451038575667, | |
| "grad_norm": 0.08613722771406174, | |
| "learning_rate": 2.3910326057686127e-05, | |
| "loss": 0.0054, | |
| "step": 6910 | |
| }, | |
| { | |
| "epoch": 20.534124629080118, | |
| "grad_norm": 0.1044430062174797, | |
| "learning_rate": 2.3769416116927335e-05, | |
| "loss": 0.0095, | |
| "step": 6920 | |
| }, | |
| { | |
| "epoch": 20.56379821958457, | |
| "grad_norm": 0.05803578346967697, | |
| "learning_rate": 2.362879302963135e-05, | |
| "loss": 0.0039, | |
| "step": 6930 | |
| }, | |
| { | |
| "epoch": 20.59347181008902, | |
| "grad_norm": 0.049366164952516556, | |
| "learning_rate": 2.3488458333629777e-05, | |
| "loss": 0.0036, | |
| "step": 6940 | |
| }, | |
| { | |
| "epoch": 20.623145400593472, | |
| "grad_norm": 0.07159484922885895, | |
| "learning_rate": 2.3348413563600325e-05, | |
| "loss": 0.0069, | |
| "step": 6950 | |
| }, | |
| { | |
| "epoch": 20.652818991097924, | |
| "grad_norm": 0.11064313352108002, | |
| "learning_rate": 2.3208660251050158e-05, | |
| "loss": 0.0052, | |
| "step": 6960 | |
| }, | |
| { | |
| "epoch": 20.682492581602375, | |
| "grad_norm": 0.13685272634029388, | |
| "learning_rate": 2.3069199924299174e-05, | |
| "loss": 0.0069, | |
| "step": 6970 | |
| }, | |
| { | |
| "epoch": 20.712166172106826, | |
| "grad_norm": 0.08148929476737976, | |
| "learning_rate": 2.29300341084631e-05, | |
| "loss": 0.0048, | |
| "step": 6980 | |
| }, | |
| { | |
| "epoch": 20.741839762611274, | |
| "grad_norm": 0.07075554132461548, | |
| "learning_rate": 2.279116432543705e-05, | |
| "loss": 0.0068, | |
| "step": 6990 | |
| }, | |
| { | |
| "epoch": 20.771513353115726, | |
| "grad_norm": 0.10635301470756531, | |
| "learning_rate": 2.2652592093878666e-05, | |
| "loss": 0.0048, | |
| "step": 7000 | |
| } | |
| ], | |
| "logging_steps": 10, | |
| "max_steps": 10000, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 30, | |
| "save_steps": 500, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": false | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 5.543762269621453e+18, | |
| "train_batch_size": 32, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
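The object above is the checkpoint state written by the Hugging Face Trainer (its trainer_state.json format: a log_history list of per-step entries plus run-level fields such as max_steps and total_flos). As a minimal sketch of how this log could be consumed, assuming the file is saved locally as trainer_state.json and that matplotlib is installed, the following Python plots the logged training loss and learning-rate schedule against global step:

import json

import matplotlib.pyplot as plt

# Assumed local path; adjust to wherever the checkpoint state was saved.
with open("trainer_state.json") as f:
    state = json.load(f)

# Keep only entries that carry a training loss; each one here also
# holds "step", "epoch", "grad_norm", and "learning_rate".
history = [e for e in state["log_history"] if "loss" in e]
steps = [e["step"] for e in history]

fig, (ax_loss, ax_lr) = plt.subplots(2, 1, sharex=True, figsize=(8, 6))
ax_loss.plot(steps, [e["loss"] for e in history])
ax_loss.set_ylabel("training loss")
ax_lr.plot(steps, [e["learning_rate"] for e in history])
ax_lr.set_ylabel("learning rate")
ax_lr.set_xlabel("global step")
fig.tight_layout()
plt.show()

Read against the state above, such a plot would show the run at step 7000 of a 10000-step budget (epoch ~20.77 of 30), with the loss plateauing around 0.004-0.009 while the learning rate decays from its 2e-05-scale peak.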