{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.999053926206244,
  "global_step": 3960,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {"epoch": 0.01, "learning_rate": 6.289308176100629e-06, "loss": 5.7433, "step": 5},
    {"epoch": 0.01, "learning_rate": 1.2578616352201259e-05, "loss": 5.2911, "step": 10},
    {"epoch": 0.02, "learning_rate": 1.8867924528301888e-05, "loss": 4.8586, "step": 15},
    {"epoch": 0.02, "learning_rate": 2.5157232704402517e-05, "loss": 4.5404, "step": 20},
    {"epoch": 0.03, "learning_rate": 3.144654088050314e-05, "loss": 4.2673, "step": 25},
    {"epoch": 0.03, "learning_rate": 3.7735849056603776e-05, "loss": 4.0794, "step": 30},
    {"epoch": 0.04, "learning_rate": 4.402515723270441e-05, "loss": 3.9428, "step": 35},
    {"epoch": 0.04, "learning_rate": 5.0314465408805034e-05, "loss": 3.8166, "step": 40},
    {"epoch": 0.05, "learning_rate": 5.660377358490566e-05, "loss": 3.7152, "step": 45},
    {"epoch": 0.05, "learning_rate": 6.289308176100629e-05, "loss": 3.6501, "step": 50},
    {"epoch": 0.06, "learning_rate": 6.918238993710691e-05, "loss": 3.583, "step": 55},
    {"epoch": 0.06, "learning_rate": 7.547169811320755e-05, "loss": 3.5229, "step": 60},
    {"epoch": 0.07, "learning_rate": 8.176100628930818e-05, "loss": 3.4693, "step": 65},
    {"epoch": 0.07, "learning_rate": 8.805031446540882e-05, "loss": 3.4358, "step": 70},
    {"epoch": 0.08, "learning_rate": 9.433962264150944e-05, "loss": 3.3779, "step": 75},
    {"epoch": 0.08, "learning_rate": 0.00010062893081761007, "loss": 3.3463, "step": 80},
    {"epoch": 0.09, "learning_rate": 0.0001069182389937107, "loss": 3.3215, "step": 85},
    {"epoch": 0.09, "learning_rate": 0.00011320754716981132, "loss": 3.2891, "step": 90},
    {"epoch": 0.1, "learning_rate": 0.00011949685534591195, "loss": 3.2593, "step": 95},
    {"epoch": 0.1, "learning_rate": 0.00012578616352201257, "loss": 3.2389, "step": 100},
    {"epoch": 0.11, "learning_rate": 0.0001320754716981132, "loss": 3.208, "step": 105},
    {"epoch": 0.11, "learning_rate": 0.00013836477987421382, "loss": 3.1893, "step": 110},
    {"epoch": 0.12, "learning_rate": 0.00014465408805031446, "loss": 3.1749, "step": 115},
    {"epoch": 0.12, "learning_rate": 0.0001509433962264151, "loss": 3.1352, "step": 120},
    {"epoch": 0.13, "learning_rate": 0.00015723270440251574, "loss": 3.1069, "step": 125},
    {"epoch": 0.13, "learning_rate": 0.00016352201257861635, "loss": 3.0995, "step": 130},
    {"epoch": 0.14, "learning_rate": 0.000169811320754717, "loss": 3.0828, "step": 135},
    {"epoch": 0.14, "learning_rate": 0.00017610062893081763, "loss": 3.0757, "step": 140},
    {"epoch": 0.15, "learning_rate": 0.00018238993710691825, "loss": 3.0406, "step": 145},
    {"epoch": 0.15, "learning_rate": 0.00018867924528301889, "loss": 3.0378, "step": 150},
    {"epoch": 0.16, "learning_rate": 0.0001949685534591195, "loss": 3.0082, "step": 155},
    {"epoch": 0.16, "learning_rate": 0.00019999996584345097, "loss": 3.0023, "step": 160},
    {"epoch": 0.17, "learning_rate": 0.00019999877036668484, "loss": 2.9836, "step": 165},
    {"epoch": 0.17, "learning_rate": 0.0001999958670858004, "loss": 2.9812, "step": 170},
    {"epoch": 0.18, "learning_rate": 0.00019999125605038058, "loss": 2.9464, "step": 175},
    {"epoch": 0.18, "learning_rate": 0.00019998493733917384, "loss": 2.9401, "step": 180},
    {"epoch": 0.19, "learning_rate": 0.00019997691106009272, "loss": 2.924, "step": 185},
    {"epoch": 0.19, "learning_rate": 0.000199967177350212, "loss": 2.9024, "step": 190},
    {"epoch": 0.2, "learning_rate": 0.00019995573637576647, "loss": 2.8813, "step": 195},
    {"epoch": 0.2, "learning_rate": 0.0001999425883321479, "loss": 2.8854, "step": 200},
    {"epoch": 0.21, "learning_rate": 0.00019992773344390194, "loss": 2.8847, "step": 205},
    {"epoch": 0.21, "learning_rate": 0.00019991117196472405, "loss": 2.8749, "step": 210},
    {"epoch": 0.22, "learning_rate": 0.00019989290417745542, "loss": 2.8411, "step": 215},
    {"epoch": 0.22, "learning_rate": 0.0001998729303940778, "loss": 2.8315, "step": 220},
    {"epoch": 0.23, "learning_rate": 0.00019985125095570855, "loss": 2.8267, "step": 225},
    {"epoch": 0.23, "learning_rate": 0.00019982786623259452, "loss": 2.8072, "step": 230},
    {"epoch": 0.24, "learning_rate": 0.0001998027766241059, "loss": 2.805, "step": 235},
    {"epoch": 0.24, "learning_rate": 0.0001997759825587294, "loss": 2.7963, "step": 240},
    {"epoch": 0.25, "learning_rate": 0.00019974748449406069, "loss": 2.7773, "step": 245},
    {"epoch": 0.25, "learning_rate": 0.0001997172829167969, "loss": 2.7808, "step": 250},
    {"epoch": 0.26, "learning_rate": 0.0001996853783427282, "loss": 2.7796, "step": 255},
    {"epoch": 0.26, "learning_rate": 0.0001996517713167289, "loss": 2.7296, "step": 260},
    {"epoch": 0.27, "learning_rate": 0.0001996164624127482, "loss": 2.7506, "step": 265},
    {"epoch": 0.27, "learning_rate": 0.00019957945223380045, "loss": 2.7212, "step": 270},
    {"epoch": 0.28, "learning_rate": 0.0001995407414119548, "loss": 2.7339, "step": 275},
    {"epoch": 0.28, "learning_rate": 0.0001995003306083244, "loss": 2.7255, "step": 280},
    {"epoch": 0.29, "learning_rate": 0.00019945822051305507, "loss": 2.6909, "step": 285},
    {"epoch": 0.29, "learning_rate": 0.0001994144118453136, "loss": 2.6944, "step": 290},
    {"epoch": 0.3, "learning_rate": 0.00019936890535327543, "loss": 2.6877, "step": 295},
    {"epoch": 0.3, "learning_rate": 0.00019932170181411184, "loss": 2.6761, "step": 300},
    {"epoch": 0.31, "learning_rate": 0.0001992728020339768, "loss": 2.6946, "step": 305},
    {"epoch": 0.31, "learning_rate": 0.00019922220684799293, "loss": 2.6618, "step": 310},
    {"epoch": 0.32, "learning_rate": 0.0001991699171202376, "loss": 2.6439, "step": 315},
    {"epoch": 0.32, "learning_rate": 0.00019911593374372788, "loss": 2.656, "step": 320},
    {"epoch": 0.33, "learning_rate": 0.0001990602576404055, "loss": 2.6382, "step": 325},
    {"epoch": 0.33, "learning_rate": 0.00019900288976112084, "loss": 2.6359, "step": 330},
    {"epoch": 0.34, "learning_rate": 0.00019894383108561697, "loss": 2.6341, "step": 335},
    {"epoch": 0.34, "learning_rate": 0.00019888308262251285, "loss": 2.617, "step": 340},
    {"epoch": 0.35, "learning_rate": 0.000198820645409286, "loss": 2.6151, "step": 345},
    {"epoch": 0.35, "learning_rate": 0.0001987565205122547, "loss": 2.6267, "step": 350},
    {"epoch": 0.36, "learning_rate": 0.00019869070902656018, "loss": 2.6007, "step": 355},
    {"epoch": 0.36, "learning_rate": 0.00019862321207614748, "loss": 2.6052, "step": 360},
    {"epoch": 0.37, "learning_rate": 0.00019855403081374648, "loss": 2.5964, "step": 365},
    {"epoch": 0.37, "learning_rate": 0.00019848316642085215, "loss": 2.5614, "step": 370},
    {"epoch": 0.38, "learning_rate": 0.00019841062010770435, "loss": 2.5562, "step": 375},
    {"epoch": 0.38, "learning_rate": 0.0001983363931132673, "loss": 2.5723, "step": 380},
    {"epoch": 0.39, "learning_rate": 0.00019826048670520824, "loss": 2.5707, "step": 385},
    {"epoch": 0.39, "learning_rate": 0.00019818290217987587, "loss": 2.5718, "step": 390},
    {"epoch": 0.4, "learning_rate": 0.00019810364086227819, "loss": 2.5659, "step": 395},
    {"epoch": 0.4, "learning_rate": 0.00019802270410605987, "loss": 2.5516, "step": 400},
    {"epoch": 0.41, "learning_rate": 0.00019794009329347924, "loss": 2.5571, "step": 405},
    {"epoch": 0.41, "learning_rate": 0.00019785580983538444, "loss": 2.534, "step": 410},
    {"epoch": 0.42, "learning_rate": 0.00019776985517118957, "loss": 2.5348, "step": 415},
    {"epoch": 0.42, "learning_rate": 0.00019768223076884996, "loss": 2.5275, "step": 420},
    {"epoch": 0.43, "learning_rate": 0.00019759293812483713, "loss": 2.5146, "step": 425},
    {"epoch": 0.43, "learning_rate": 0.00019750197876411334, "loss": 2.5079, "step": 430},
    {"epoch": 0.44, "learning_rate": 0.00019740935424010538, "loss": 2.5012, "step": 435},
    {"epoch": 0.44, "learning_rate": 0.00019731506613467816, "loss": 2.5222, "step": 440},
    {"epoch": 0.45, "learning_rate": 0.00019721911605810757, "loss": 2.4999, "step": 445},
    {"epoch": 0.45, "learning_rate": 0.00019712150564905317, "loss": 2.5052, "step": 450},
    {"epoch": 0.46, "learning_rate": 0.00019702223657453002, "loss": 2.4901, "step": 455},
    {"epoch": 0.46, "learning_rate": 0.00019692131052988034, "loss": 2.4705, "step": 460},
    {"epoch": 0.47, "learning_rate": 0.0001968187292387444, "loss": 2.484, "step": 465},
    {"epoch": 0.47, "learning_rate": 0.00019671449445303135, "loss": 2.4761, "step": 470},
    {"epoch": 0.48, "learning_rate": 0.00019660860795288896, "loss": 2.4538, "step": 475},
    {"epoch": 0.48, "learning_rate": 0.00019650107154667353, "loss": 2.4634, "step": 480},
    {"epoch": 0.49, "learning_rate": 0.0001963918870709188, "loss": 2.4823, "step": 485},
    {"epoch": 0.49, "learning_rate": 0.00019628105639030467, "loss": 2.4622, "step": 490},
    {"epoch": 0.5, "learning_rate": 0.00019616858139762534, "loss": 2.4447, "step": 495},
    {"epoch": 0.5, "learning_rate": 0.000196054464013757, "loss": 2.459, "step": 500},
    {"epoch": 0.51, "learning_rate": 0.00019593870618762497, "loss": 2.4341, "step": 505},
    {"epoch": 0.51, "learning_rate": 0.00019582130989617051, "loss": 2.4287, "step": 510},
    {"epoch": 0.52, "learning_rate": 0.00019570227714431698, "loss": 2.4302, "step": 515},
    {"epoch": 0.52, "learning_rate": 0.00019558160996493556, "loss": 2.4237, "step": 520},
    {"epoch": 0.53, "learning_rate": 0.0001954593104188107, "loss": 2.4327, "step": 525},
    {"epoch": 0.53, "learning_rate": 0.00019533538059460475, "loss": 2.419, "step": 530},
    {"epoch": 0.54, "learning_rate": 0.00019520982260882238, "loss": 2.4153, "step": 535},
    {"epoch": 0.54, "learning_rate": 0.00019508263860577442, "loss": 2.414, "step": 540},
    {"epoch": 0.55, "learning_rate": 0.0001949538307575412, "loss": 2.4247, "step": 545},
    {"epoch": 0.56, "learning_rate": 0.00019482340126393552, "loss": 2.3946, "step": 550},
    {"epoch": 0.56, "learning_rate": 0.00019469135235246503, "loss": 2.4084, "step": 555},
    {"epoch": 0.57, "learning_rate": 0.00019455768627829422, "loss": 2.3901, "step": 560},
    {"epoch": 0.57, "learning_rate": 0.00019442240532420584, "loss": 2.3726, "step": 565},
    {"epoch": 0.58, "learning_rate": 0.00019428551180056204, "loss": 2.3769, "step": 570},
    {"epoch": 0.58, "learning_rate": 0.0001941470080452648, "loss": 2.3772, "step": 575},
    {"epoch": 0.59, "learning_rate": 0.00019400689642371606, "loss": 2.3811, "step": 580},
    {"epoch": 0.59, "learning_rate": 0.00019386517932877724, "loss": 2.3667, "step": 585},
    {"epoch": 0.6, "learning_rate": 0.0001937218591807285, "loss": 2.3726, "step": 590},
    {"epoch": 0.6, "learning_rate": 0.00019357693842722736, "loss": 2.3663, "step": 595},
    {"epoch": 0.61, "learning_rate": 0.0001934304195432668, "loss": 2.3675, "step": 600},
    {"epoch": 0.61, "learning_rate": 0.00019328230503113316, "loss": 2.3655, "step": 605},
    {"epoch": 0.62, "learning_rate": 0.00019313259742036328, "loss": 2.368, "step": 610},
    {"epoch": 0.62, "learning_rate": 0.00019298129926770133, "loss": 2.3418, "step": 615},
    {"epoch": 0.63, "learning_rate": 0.00019282841315705515, "loss": 2.3395, "step": 620},
    {"epoch": 0.63, "learning_rate": 0.00019267394169945217, "loss": 2.3583, "step": 625},
    {"epoch": 0.64, "learning_rate": 0.0001925178875329947, "loss": 2.3253, "step": 630},
    {"epoch": 0.64, "learning_rate": 0.00019236025332281507, "loss": 2.3288, "step": 635},
    {"epoch": 0.65, "learning_rate": 0.00019220104176102985, "loss": 2.3621, "step": 640},
    {"epoch": 0.65, "learning_rate": 0.0001920402555666941, "loss": 2.3427, "step": 645},
    {"epoch": 0.66, "learning_rate": 0.00019187789748575481, "loss": 2.3394, "step": 650},
    {"epoch": 0.66, "learning_rate": 0.00019171397029100416, "loss": 2.3375, "step": 655},
    {"epoch": 0.67, "learning_rate": 0.00019154847678203184, "loss": 2.3196, "step": 660},
    {"epoch": 0.67, "learning_rate": 0.00019138141978517764, "loss": 2.3157, "step": 665},
    {"epoch": 0.68, "learning_rate": 0.00019121280215348286, "loss": 2.2923, "step": 670},
    {"epoch": 0.68, "learning_rate": 0.0001910426267666418, "loss": 2.3152, "step": 675},
    {"epoch": 0.69, "learning_rate": 0.00019087089653095252, "loss": 2.3209, "step": 680},
    {"epoch": 0.69, "learning_rate": 0.000190697614379267, "loss": 2.31, "step": 685},
    {"epoch": 0.7, "learning_rate": 0.00019052278327094145, "loss": 2.3068, "step": 690},
    {"epoch": 0.7, "learning_rate": 0.0001903464061917854, "loss": 2.2994, "step": 695},
    {"epoch": 0.71, "learning_rate": 0.00019016848615401095, "loss": 2.3125, "step": 700},
    {"epoch": 0.71, "learning_rate": 0.00018998902619618116, "loss": 2.3146, "step": 705},
    {"epoch": 0.72, "learning_rate": 0.00018980802938315826, "loss": 2.2878, "step": 710},
    {"epoch": 0.72, "learning_rate": 0.00018962549880605133, "loss": 2.2895, "step": 715},
    {"epoch": 0.73, "learning_rate": 0.00018944143758216337, "loss": 2.3017, "step": 720},
    {"epoch": 0.73, "learning_rate": 0.00018925584885493823, "loss": 2.2918, "step": 725},
    {"epoch": 0.74, "learning_rate": 0.00018906873579390681, "loss": 2.2867, "step": 730},
    {"epoch": 0.74, "learning_rate": 0.00018888010159463293, "loss": 2.2766, "step": 735},
    {"epoch": 0.75, "learning_rate": 0.00018868994947865883, "loss": 2.2821, "step": 740},
    {"epoch": 0.75, "learning_rate": 0.00018849828269345016, "loss": 2.2982, "step": 745},
    {"epoch": 0.76, "learning_rate": 0.00018830510451234037, "loss": 2.2653, "step": 750},
    {"epoch": 0.76, "learning_rate": 0.00018811041823447498, "loss": 2.2641, "step": 755},
    {"epoch": 0.77, "learning_rate": 0.00018791422718475516, "loss": 2.2767, "step": 760},
    {"epoch": 0.77, "learning_rate": 0.0001877165347137809, "loss": 2.2804, "step": 765},
    {"epoch": 0.78, "learning_rate": 0.0001875173441977939, "loss": 2.2629, "step": 770},
    {"epoch": 0.78, "learning_rate": 0.00018731665903861985, "loss": 2.2589, "step": 775},
    {"epoch": 0.79, "learning_rate": 0.00018711448266361025, "loss": 2.2489, "step": 780},
    {"epoch": 0.79, "learning_rate": 0.00018691081852558407, "loss": 2.2529, "step": 785},
    {"epoch": 0.8, "learning_rate": 0.00018670567010276858, "loss": 2.2726, "step": 790},
    {"epoch": 0.8, "learning_rate": 0.0001864990408987401, "loss": 2.2599, "step": 795},
    {"epoch": 0.81, "learning_rate": 0.00018629093444236402, "loss": 2.2437, "step": 800},
    {"epoch": 0.81, "learning_rate": 0.00018608135428773478, "loss": 2.2449, "step": 805},
    {"epoch": 0.82, "learning_rate": 0.0001858703040141148, "loss": 2.2381, "step": 810},
    {"epoch": 0.82, "learning_rate": 0.0001856577872258737, "loss": 2.2333, "step": 815},
    {"epoch": 0.83, "learning_rate": 0.0001854438075524266, "loss": 2.2305, "step": 820},
    {"epoch": 0.83, "learning_rate": 0.0001852283686481721, "loss": 2.2371, "step": 825},
    {"epoch": 0.84, "learning_rate": 0.0001850114741924299, "loss": 2.2247, "step": 830},
    {"epoch": 0.84, "learning_rate": 0.000184793127889378, "loss": 2.235, "step": 835},
    {"epoch": 0.85, "learning_rate": 0.0001845733334679894, "loss": 2.2326, "step": 840},
    {"epoch": 0.85, "learning_rate": 0.00018435209468196847, "loss": 2.2293, "step": 845},
    {"epoch": 0.86, "learning_rate": 0.00018412941530968676, "loss": 2.2337, "step": 850},
    {"epoch": 0.86, "learning_rate": 0.00018390529915411852, "loss": 2.2283, "step": 855},
    {"epoch": 0.87, "learning_rate": 0.00018367975004277573, "loss": 2.2207, "step": 860},
    {"epoch": 0.87, "learning_rate": 0.00018345277182764278, "loss": 2.2269, "step": 865},
    {"epoch": 0.88, "learning_rate": 0.00018322436838511064, "loss": 2.2116, "step": 870},
    {"epoch": 0.88, "learning_rate": 0.00018299454361591066, "loss": 2.2105, "step": 875},
    {"epoch": 0.89, "learning_rate": 0.00018276330144504803, "loss": 2.2061, "step": 880},
    {"epoch": 0.89, "learning_rate": 0.00018253064582173455, "loss": 2.2157, "step": 885},
    {"epoch": 0.9, "learning_rate": 0.00018229658071932146, "loss": 2.2018, "step": 890},
    {"epoch": 0.9, "learning_rate": 0.00018206111013523133, "loss": 2.2106, "step": 895},
    {"epoch": 0.91, "learning_rate": 0.00018182423809088997, "loss": 2.2141, "step": 900},
    {"epoch": 0.91, "learning_rate": 0.00018158596863165763, "loss": 2.2089, "step": 905},
    {"epoch": 0.92, "learning_rate": 0.00018134630582675998, "loss": 2.1962, "step": 910},
    {"epoch": 0.92, "learning_rate": 0.00018110525376921862, "loss": 2.1802, "step": 915},
    {"epoch": 0.93, "learning_rate": 0.00018086281657578112, "loss": 2.1924, "step": 920},
    {"epoch": 0.93, "learning_rate": 0.0001806189983868508, "loss": 2.1845, "step": 925},
    {"epoch": 0.94, "learning_rate": 0.0001803738033664159, "loss": 2.198, "step": 930},
    {"epoch": 0.94, "learning_rate": 0.00018012723570197858, "loss": 2.1849, "step": 935},
    {"epoch": 0.95, "learning_rate": 0.00017987929960448332, "loss": 2.1755, "step": 940},
    {"epoch": 0.95, "learning_rate": 0.00017962999930824512, "loss": 2.1981, "step": 945},
    {"epoch": 0.96, "learning_rate": 0.00017937933907087703, "loss": 2.1859, "step": 950},
    {"epoch": 0.96, "learning_rate": 0.00017912732317321752, "loss": 2.1664, "step": 955},
    {"epoch": 0.97, "learning_rate": 0.00017887395591925745, "loss": 2.1795, "step": 960},
    {"epoch": 0.97, "learning_rate": 0.0001786192416360664, "loss": 2.1862, "step": 965},
    {"epoch": 0.98, "learning_rate": 0.00017836318467371888, "loss": 2.1519, "step": 970},
    {"epoch": 0.98, "learning_rate": 0.00017810578940521995, "loss": 2.1541, "step": 975},
    {"epoch": 0.99, "learning_rate": 0.0001778470602264308, "loss": 2.1666, "step": 980},
    {"epoch": 0.99, "learning_rate": 0.00017758700155599317, "loss": 2.1724, "step": 985},
    {"epoch": 1.0, "learning_rate": 0.00017732561783525446, "loss": 2.1677, "step": 990},
    {"epoch": 1.0, "eval_loss": 2.0888257026672363, "eval_runtime": 106.8696, "eval_samples_per_second": 124.629, "eval_steps_per_second": 15.58, "step": 990},
    {"epoch": 1.01, "learning_rate": 0.00017706291352819145, "loss": 2.5238, "step": 995},
    {"epoch": 1.01, "learning_rate": 0.00017679889312133439, "loss": 2.096, "step": 1000},
    {"epoch": 1.02, "learning_rate": 0.00017653356112368998, "loss": 2.0933, "step": 1005},
    {"epoch": 1.02, "learning_rate": 0.00017626692206666476, "loss": 2.1039, "step": 1010},
    {"epoch": 1.03, "learning_rate": 0.00017599898050398754, "loss": 2.0939, "step": 1015},
    {"epoch": 1.03, "learning_rate": 0.00017572974101163165, "loss": 2.0904, "step": 1020},
    {"epoch": 1.04, "learning_rate": 0.00017545920818773668, "loss": 2.1099, "step": 1025},
    {"epoch": 1.04, "learning_rate": 0.00017518738665253027, "loss": 2.0936, "step": 1030},
    {"epoch": 1.05, "learning_rate": 0.0001749142810482488, "loss": 2.0855, "step": 1035},
    {"epoch": 1.05, "learning_rate": 0.00017463989603905841, "loss": 2.0943, "step": 1040},
    {"epoch": 1.06, "learning_rate": 0.00017436423631097517, "loss": 2.0859, "step": 1045},
    {"epoch": 1.06, "learning_rate": 0.00017408730657178517, "loss": 2.0732, "step": 1050},
    {"epoch": 1.07, "learning_rate": 0.00017380911155096408, "loss": 2.0809, "step": 1055},
    {"epoch": 1.07, "learning_rate": 0.00017352965599959634, "loss": 2.0675, "step": 1060},
    {"epoch": 1.08, "learning_rate": 0.00017324894469029404, "loss": 2.0893, "step": 1065},
    {"epoch": 1.08, "learning_rate": 0.00017296698241711544, "loss": 2.0714, "step": 1070},
    {"epoch": 1.09, "learning_rate": 0.00017268377399548302, "loss": 2.0635, "step": 1075},
    {"epoch": 1.09, "learning_rate": 0.0001723993242621014, "loss": 2.0781, "step": 1080},
    {"epoch": 1.1, "learning_rate": 0.00017211363807487456, "loss": 2.096, "step": 1085},
    {"epoch": 1.1, "learning_rate": 0.00017182672031282296, "loss": 2.0508, "step": 1090},
    {"epoch": 1.11, "learning_rate": 0.0001715385758760002, "loss": 2.0748, "step": 1095},
    {"epoch": 1.11, "learning_rate": 0.00017124920968540936, "loss": 2.0777, "step": 1100},
    {"epoch": 1.12, "learning_rate": 0.00017095862668291894, "loss": 2.0694, "step": 1105},
    {"epoch": 1.12, "learning_rate": 0.0001706668318311784, "loss": 2.0762, "step": 1110},
    {"epoch": 1.13, "learning_rate": 0.00017037383011353355, "loss": 2.0849, "step": 1115},
    {"epoch": 1.13, "learning_rate": 0.00017007962653394133, "loss": 2.0816, "step": 1120},
    {"epoch": 1.14, "learning_rate": 0.0001697842261168843, "loss": 2.0551, "step": 1125},
    {"epoch": 1.14, "learning_rate": 0.0001694876339072851, "loss": 2.0851, "step": 1130},
    {"epoch": 1.15, "learning_rate": 0.00016918985497041994, "loss": 2.0696, "step": 1135},
    {"epoch": 1.15, "learning_rate": 0.0001688908943918322, "loss": 2.0712, "step": 1140},
    {"epoch": 1.16, "learning_rate": 0.00016859075727724586, "loss": 2.0522, "step": 1145},
    {"epoch": 1.16, "learning_rate": 0.0001682894487524779, "loss": 2.0487, "step": 1150},
    {"epoch": 1.17, "learning_rate": 0.000167986973963351, "loss": 2.053, "step": 1155},
    {"epoch": 1.17, "learning_rate": 0.00016768333807560558, "loss": 2.0575, "step": 1160},
    {"epoch": 1.18, "learning_rate": 0.0001673785462748116, "loss": 2.062, "step": 1165},
    {"epoch": 1.18, "learning_rate": 0.00016707260376628002, "loss": 2.0396, "step": 1170},
    {"epoch": 1.19, "learning_rate": 0.00016676551577497382, "loss": 2.0562, "step": 1175},
    {"epoch": 1.19, "learning_rate": 0.0001664572875454188, "loss": 2.0456, "step": 1180},
    {"epoch": 1.2, "learning_rate": 0.0001661479243416142, "loss": 2.0543, "step": 1185},
    {"epoch": 1.2, "learning_rate": 0.00016583743144694241, "loss": 2.0695, "step": 1190},
    {"epoch": 1.21, "learning_rate": 0.00016552581416407917, "loss": 2.0613, "step": 1195},
    {"epoch": 1.21, "learning_rate": 0.00016521307781490268, "loss": 2.0313, "step": 1200},
    {"epoch": 1.22, "learning_rate": 0.0001648992277404028, "loss": 2.0416, "step": 1205},
    {"epoch": 1.22, "learning_rate": 0.00016458426930059003, "loss": 2.0529, "step": 1210},
    {"epoch": 1.23, "learning_rate": 0.00016426820787440364, "loss": 2.0344, "step": 1215},
    {"epoch": 1.23, "learning_rate": 0.00016395104885962014, "loss": 2.0335, "step": 1220},
    {"epoch": 1.24, "learning_rate": 0.00016363279767276075, "loss": 2.0373, "step": 1225},
    {"epoch": 1.24, "learning_rate": 0.00016331345974899923, "loss": 2.0416, "step": 1230},
    {"epoch": 1.25, "learning_rate": 0.00016299304054206886, "loss": 2.0357, "step": 1235},
    {"epoch": 1.25, "learning_rate": 0.00016267154552416938, "loss": 2.0326, "step": 1240},
    {"epoch": 1.26, "learning_rate": 0.00016234898018587337, "loss": 2.0421, "step": 1245},
    {"epoch": 1.26, "learning_rate": 0.00016202535003603273, "loss": 2.0278, "step": 1250},
    {"epoch": 1.27, "learning_rate": 0.0001617006606016845, "loss": 2.0503, "step": 1255},
    {"epoch": 1.27, "learning_rate": 0.00016137491742795632, "loss": 2.0314, "step": 1260},
    {"epoch": 1.28, "learning_rate": 0.00016104812607797202, "loss": 2.0495, "step": 1265},
    {"epoch": 1.28, "learning_rate": 0.00016072029213275626, "loss": 2.035, "step": 1270},
    {"epoch": 1.29, "learning_rate": 0.00016039142119113957, "loss": 2.0221, "step": 1275},
    {"epoch": 1.29, "learning_rate": 0.00016006151886966248, "loss": 2.0347, "step": 1280},
    {"epoch": 1.3, "learning_rate": 0.0001597305908024797, "loss": 2.0261, "step": 1285},
    {"epoch": 1.3, "learning_rate": 0.00015939864264126393, "loss": 2.0282, "step": 1290},
    {"epoch": 1.31, "learning_rate": 0.00015906568005510919, "loss": 2.0107, "step": 1295},
    {"epoch": 1.31, "learning_rate": 0.00015873170873043413, "loss": 2.0284, "step": 1300},
    {"epoch": 1.32, "learning_rate": 0.00015839673437088498, "loss": 2.015, "step": 1305},
    {"epoch": 1.32, "learning_rate": 0.000158060762697238, "loss": 2.0027, "step": 1310},
    {"epoch": 1.33, "learning_rate": 0.0001577237994473018, "loss": 2.0183, "step": 1315},
    {"epoch": 1.33, "learning_rate": 0.0001573858503758194, "loss": 2.0183, "step": 1320},
    {"epoch": 1.34, "learning_rate": 0.0001570469212543699, "loss": 2.012, "step": 1325},
    {"epoch": 1.34, "learning_rate": 0.00015670701787127005, "loss": 1.9962, "step": 1330},
    {"epoch": 1.35, "learning_rate": 0.00015636614603147512, "loss": 2.019, "step": 1335},
    {"epoch": 1.35, "learning_rate": 0.00015602431155648007, "loss": 2.0047, "step": 1340},
    {"epoch": 1.36, "learning_rate": 0.00015568152028421988, "loss": 2.0012, "step": 1345},
    {"epoch": 1.36, "learning_rate": 0.00015533777806897003, "loss": 1.9839, "step": 1350},
    {"epoch": 1.37, "learning_rate": 0.00015499309078124645, "loss": 1.9936, "step": 1355},
    {"epoch": 1.37, "learning_rate": 0.00015464746430770516, "loss": 1.9975, "step": 1360},
    {"epoch": 1.38, "learning_rate": 0.0001543009045510419, "loss": 2.0006, "step": 1365},
    {"epoch": 1.38, "learning_rate": 0.00015395341742989124, "loss": 2.0144, "step": 1370},
    {"epoch": 1.39, "learning_rate": 0.00015360500887872557, "loss": 1.9964, "step": 1375},
    {"epoch": 1.39, "learning_rate": 0.00015325568484775358, "loss": 1.9902, "step": 1380},
    {"epoch": 1.4, "learning_rate": 0.00015290545130281885, "loss": 2.0048, "step": 1385},
    {"epoch": 1.4, "learning_rate": 0.0001525543142252978, "loss": 2.0061, "step": 1390},
    {"epoch": 1.41, "learning_rate": 0.00015220227961199766, "loss": 1.9854, "step": 1395},
    {"epoch": 1.41, "learning_rate": 0.000151849353475054, "loss": 2.0033, "step": 1400},
    {"epoch": 1.42, "learning_rate": 0.00015149554184182802, "loss": 1.9844, "step": 1405},
    {"epoch": 1.42, "learning_rate": 0.00015114085075480367, "loss": 1.997, "step": 1410},
    {"epoch": 1.43, "learning_rate": 0.0001507852862714844, "loss": 1.9905, "step": 1415},
    {"epoch": 1.43, "learning_rate": 0.0001504288544642898, "loss": 2.0043, "step": 1420},
    {"epoch": 1.44, "learning_rate": 0.00015007156142045176, "loss": 1.9792, "step": 1425},
    {"epoch": 1.44, "learning_rate": 0.00014971341324191068, "loss": 2.0062, "step": 1430},
    {"epoch": 1.45, "learning_rate": 0.0001493544160452111, "loss": 1.9878, "step": 1435},
    {"epoch": 1.45, "learning_rate": 0.00014899457596139729, "loss": 1.9749, "step": 1440},
    {"epoch": 1.46, "learning_rate": 0.00014863389913590864, "loss": 1.98, "step": 1445},
    {"epoch": 1.46, "learning_rate": 0.0001482723917284745, "loss": 1.9756, "step": 1450},
    {"epoch": 1.47, "learning_rate": 0.00014791005991300924, "loss": 1.9749, "step": 1455},
    {"epoch": 1.47, "learning_rate": 0.0001475469098775066, "loss": 1.9647, "step": 1460},
    {"epoch": 1.48, "learning_rate": 0.00014718294782393407, "loss": 1.9799, "step": 1465},
    {"epoch": 1.48, "learning_rate": 0.00014681817996812705, "loss": 1.9697, "step": 1470},
    {"epoch": 1.49, "learning_rate": 0.00014645261253968262, "loss": 1.9613, "step": 1475},
    {"epoch": 1.49, "learning_rate": 0.00014608625178185312, "loss": 1.9812, "step": 1480},
    {"epoch": 1.5, "learning_rate": 0.00014571910395143965, "loss": 1.9796, "step": 1485},
    {"epoch": 1.5, "learning_rate": 0.000145351175318685, "loss": 1.9723, "step": 1490},
    {"epoch": 1.51, "learning_rate": 0.00014498247216716682, "loss": 1.9648, "step": 1495},
    {"epoch": 1.51, "learning_rate": 0.00014461300079369013, "loss": 1.9464, "step": 1500},
    {"epoch": 1.52, "learning_rate": 0.00014424276750817986, "loss": 1.9698, "step": 1505},
    {"epoch": 1.52, "learning_rate": 0.00014387177863357307, "loss": 1.9431, "step": 1510},
    {"epoch": 1.53, "learning_rate": 0.00014350004050571092, "loss": 1.9648, "step": 1515},
    {"epoch": 1.53, "learning_rate": 0.00014312755947323052, "loss": 1.9693, "step": 1520},
    {"epoch": 1.54, "learning_rate": 0.0001427543418974566, "loss": 1.9606, "step": 1525},
    {"epoch": 1.54, "learning_rate": 0.0001423803941522925, "loss": 1.9557, "step": 1530},
    {"epoch": 1.55, "learning_rate": 0.00014200572262411192, "loss": 1.9674, "step": 1535},
    {"epoch": 1.56, "learning_rate": 0.00014163033371164917, "loss": 1.9663, "step": 1540},
    {"epoch": 1.56, "learning_rate": 0.00014125423382589048, "loss": 1.9341, "step": 1545},
    {"epoch": 1.57, "learning_rate": 0.00014087742938996418, "loss": 1.9535, "step": 1550},
    {"epoch": 1.57, "learning_rate": 0.00014049992683903104, "loss": 1.9382, "step": 1555},
    {"epoch": 1.58, "learning_rate": 0.00014012173262017443, "loss": 1.9495, "step": 1560},
    {"epoch": 1.58, "learning_rate": 0.00013974285319229025, "loss": 1.9587, "step": 1565},
    {"epoch": 1.59, "learning_rate": 0.0001393632950259765, "loss": 1.9475, "step": 1570},
    {"epoch": 1.59, "learning_rate": 0.00013898306460342295, "loss": 1.96, "step": 1575},
    {"epoch": 1.6, "learning_rate": 0.00013860216841830018, "loss": 1.9517, "step": 1580},
    {"epoch": 1.6, "learning_rate": 0.00013822061297564896, "loss": 1.9415, "step": 1585},
    {"epoch": 1.61, "learning_rate": 0.000137838404791769, "loss": 1.9566, "step": 1590},
    {"epoch": 1.61, "learning_rate": 0.00013745555039410763, "loss": 1.9566, "step": 1595},
    {"epoch": 1.62, "learning_rate": 0.00013707205632114842, "loss": 1.942, "step": 1600},
    {"epoch": 1.62, "learning_rate": 0.0001366879291222995, "loss": 1.9478, "step": 1605},
    {"epoch": 1.63, "learning_rate": 0.00013630317535778163, "loss": 1.9668, "step": 1610},
    {"epoch": 1.63, "learning_rate": 0.0001359178015985163, "loss": 1.932, "step": 1615},
    {"epoch": 1.64, "learning_rate": 0.00013553181442601327, "loss": 1.964, "step": 1620},
    {"epoch": 1.64, "learning_rate": 0.00013514522043225855, "loss": 1.9471, "step": 1625},
    {"epoch": 1.65, "learning_rate": 0.00013475802621960137, "loss": 1.9414, "step": 1630},
    {"epoch": 1.65, "learning_rate": 0.0001343702384006418, "loss": 1.9402, "step": 1635},
    {"epoch": 1.66, "learning_rate": 0.0001339818635981176, "loss": 1.946, "step": 1640},
    {"epoch": 1.66, "learning_rate": 0.00013359290844479118, "loss": 1.943, "step": 1645},
    {"epoch": 1.67, "learning_rate": 0.0001332033795833364, "loss": 1.9344, "step": 1650},
    {"epoch": 1.67, "learning_rate": 0.00013281328366622494, "loss": 1.9268, "step": 1655},
    {"epoch": 1.68, "learning_rate": 0.00013242262735561292, "loss": 1.938, "step": 1660},
    {"epoch": 1.68, "learning_rate": 0.00013203141732322693, "loss": 1.9305, "step": 1665},
    {"epoch": 1.69, "learning_rate": 0.0001316396602502501, "loss": 1.9316, "step": 1670},
    {"epoch": 1.69, "learning_rate": 0.00013124736282720822, "loss": 1.9243, "step": 1675},
    {"epoch": 1.7, "learning_rate": 0.00013085453175385513, "loss": 1.9441, "step": 1680},
    {"epoch": 1.7, "learning_rate": 0.00013046117373905866, "loss": 1.9184, "step": 1685},
    {"epoch": 1.71, "learning_rate": 0.0001300672955006857, "loss": 1.9521, "step": 1690},
    {"epoch": 1.71, "learning_rate": 0.00012967290376548782, "loss": 1.927, "step": 1695},
    {"epoch": 1.72, "learning_rate": 0.00012927800526898604, "loss": 1.9317, "step": 1700},
    {"epoch": 1.72, "learning_rate": 0.00012888260675535622, "loss": 1.9251, "step": 1705},
    {"epoch": 1.73, "learning_rate": 0.00012848671497731336, "loss": 1.9138, "step": 1710},
    {"epoch": 1.73, "learning_rate": 0.0001280903366959968, "loss": 1.9152, "step": 1715},
    {"epoch": 1.74, "learning_rate": 0.00012769347868085427, "loss": 1.9302, "step": 1720},
    {"epoch": 1.74, "learning_rate": 0.0001272961477095267, "loss": 1.9222, "step": 1725},
    {"epoch": 1.75, "learning_rate": 0.00012689835056773228, "loss": 1.9256, "step": 1730},
    {"epoch": 1.75, "learning_rate": 0.0001265000940491504, "loss": 1.9017, "step": 1735},
    {"epoch": 1.76, "learning_rate": 0.00012610138495530599, "loss": 1.9096, "step": 1740},
    {"epoch": 1.76, "learning_rate": 0.00012570223009545308, "loss": 1.9122, "step": 1745},
    {"epoch": 1.77, "learning_rate": 0.00012530263628645868, "loss": 1.9296, "step": 1750},
    {"epoch": 1.77, "learning_rate": 0.00012490261035268612, "loss": 1.8998, "step": 1755},
    {"epoch": 1.78, "learning_rate": 0.0001245021591258789, "loss": 1.9104, "step": 1760},
    {"epoch": 1.78, "learning_rate": 0.00012410128944504359, "loss": 1.9083, "step": 1765},
    {"epoch": 1.79, "learning_rate": 0.00012370000815633327, "loss": 1.918, "step": 1770},
    {"epoch": 1.79, "learning_rate": 0.00012329832211293069, "loss": 1.8937, "step": 1775},
    {"epoch": 1.8, "learning_rate": 0.00012289623817493087, "loss": 1.912, "step": 1780},
    {"epoch": 1.8, "learning_rate": 0.00012249376320922436, "loss": 1.9008, "step": 1785},
    {"epoch": 1.81, "learning_rate": 0.00012209090408937971, "loss": 1.9204, "step": 1790},
    {"epoch": 1.81, "learning_rate": 0.00012168766769552614, "loss": 1.9103, "step": 1795},
    {"epoch": 1.82, "learning_rate": 0.00012128406091423614, "loss": 1.8958, "step": 1800},
    {"epoch": 1.82, "learning_rate": 0.00012088009063840762, "loss": 1.8986, "step": 1805},
    {"epoch": 1.83, "learning_rate": 0.00012047576376714647, "loss": 1.8898, "step": 1810},
    {"epoch": 1.83, "learning_rate": 0.00012007108720564853, "loss": 1.9027, "step": 1815},
    {"epoch": 1.84, "learning_rate": 0.00011966606786508182, "loss": 1.905, "step": 1820},
    {"epoch": 1.84, "learning_rate": 0.00011926071266246826, "loss": 1.9051, "step": 1825},
    {"epoch": 1.85, "learning_rate": 0.00011885502852056594, "loss": 1.9067, "step": 1830},
    {"epoch": 1.85, "learning_rate": 0.00011844902236775044, "loss": 1.8823, "step": 1835},
    {"epoch": 1.86, "learning_rate": 0.0001180427011378969, "loss": 1.8949, "step": 1840},
    {"epoch": 1.86, "learning_rate": 0.00011763607177026131, "loss": 1.8945, "step": 1845},
    {"epoch": 1.87, "learning_rate": 0.0001172291412093621, "loss": 1.9152, "step": 1850},
    {"epoch": 1.87, "learning_rate": 0.00011682191640486169, "loss": 1.8951, "step": 1855},
    {"epoch": 1.88, "learning_rate": 0.0001164144043114475, "loss": 1.9003, "step": 1860},
    {"epoch": 1.88, "learning_rate": 0.00011600661188871354, "loss": 1.8892, "step": 1865},
    {"epoch": 1.89, "learning_rate": 0.00011559854610104106, "loss": 1.8922, "step": 1870},
    {"epoch": 1.89, "learning_rate": 0.00011519021391748023, "loss": 1.9047, "step": 1875},
    {"epoch": 1.9, "learning_rate": 0.00011478162231163061, "loss": 1.8902, "step": 1880},
    {"epoch": 1.9, "learning_rate": 0.00011437277826152224, "loss": 1.8895, "step": 1885},
    {"epoch": 1.91, "learning_rate": 0.00011396368874949652, "loss": 1.8839, "step": 1890},
    {"epoch": 1.91, "learning_rate": 0.00011355436076208687, "loss": 1.8859, "step": 1895},
    {"epoch": 1.92, "learning_rate": 0.00011314480128989953, "loss": 1.8897, "step": 1900},
    {"epoch": 1.92, "learning_rate": 0.00011273501732749398, "loss": 1.882, "step": 1905},
    {"epoch": 1.93, "learning_rate": 0.00011232501587326374, "loss": 1.8906, "step": 1910},
    {"epoch": 1.93, "learning_rate": 0.00011191480392931655, "loss": 1.8743, "step": 1915},
    {"epoch": 1.94, "learning_rate": 0.00011150438850135508, "loss": 1.8777, "step": 1920},
    {"epoch": 1.94, "learning_rate": 0.00011109377659855707, "loss": 1.882, "step": 1925},
    {"epoch": 1.95, "learning_rate": 0.00011068297523345573, "loss": 1.8653, "step": 1930},
    {"epoch": 1.95, "learning_rate": 0.00011027199142181998, "loss": 1.884, "step": 1935},
    {"epoch": 1.96, "learning_rate": 0.00010986083218253456, "loss": 1.892, "step": 1940},
    {"epoch": 1.96, "learning_rate": 0.00010944950453748029, "loss": 1.871, "step": 1945},
    {"epoch": 1.97, "learning_rate": 0.00010903801551141395, "loss": 1.8827, "step": 1950},
    {"epoch": 1.97, "learning_rate": 0.00010862637213184852, "loss": 1.8757, "step": 1955},
    {"epoch": 1.98, "learning_rate": 0.00010821458142893301, "loss": 1.8614, "step": 1960},
    {"epoch": 1.98, "learning_rate": 0.0001078026504353325, "loss": 1.8783, "step": 1965},
    {"epoch": 1.99, "learning_rate": 0.00010739058618610798, "loss": 1.8757, "step": 1970},
    {"epoch": 1.99, "learning_rate": 0.00010697839571859616, "loss": 1.8664, "step": 1975},
    {"epoch": 2.0, "learning_rate": 0.00010656608607228941, "loss": 1.856, "step": 1980},
    {"epoch": 2.0, "eval_loss": 1.82151198387146, "eval_runtime": 106.7172, "eval_samples_per_second": 124.807, "eval_steps_per_second": 15.602, "step": 1980},
    {"epoch": 2.01, "learning_rate": 0.00010615366428871542, "loss": 2.1449, "step": 1985},
    {"epoch": 2.01, "learning_rate": 0.00010574113741131704, "loss": 1.7986, "step": 1990},
    {"epoch": 2.02, "learning_rate": 0.00010532851248533186, "loss": 1.7705, "step": 1995},
    {"epoch": 2.02, "learning_rate": 0.00010491579655767203, "loss": 1.7827, "step": 2000},
    {"epoch": 2.03, "learning_rate": 0.00010450299667680382, "loss": 1.8033, "step": 2005},
    {"epoch": 2.03, "learning_rate": 0.00010409011989262725, "loss": 1.7888, "step": 2010},
    {"epoch": 2.04, "learning_rate": 0.00010367717325635583, "loss": 1.775, "step": 2015},
    {"epoch": 2.04, "learning_rate": 0.00010326416382039588, "loss": 1.7692, "step": 2020},
    {"epoch": 2.05, "learning_rate": 0.00010285109863822631, "loss": 1.7874, "step": 2025},
    {"epoch": 2.05, "learning_rate": 0.00010243798476427801, "loss": 1.7889, "step": 2030},
    {"epoch": 2.06, "learning_rate": 0.00010202482925381358, "loss": 1.7943, "step": 2035},
    {"epoch": 2.06, "learning_rate": 0.00010161163916280654, "loss": 1.7865, "step": 2040},
    {"epoch": 2.07, "learning_rate": 0.00010119842154782104, "loss": 1.7823, "step": 2045},
    {"epoch": 2.07, "learning_rate": 0.00010078518346589134, "loss": 1.7958, "step": 2050},
    {"epoch": 2.08, "learning_rate": 0.00010037193197440118, "loss": 1.7666, "step": 2055},
    {"epoch": 2.08, "learning_rate": 9.995867413096333e-05, "loss": 1.7981, "step": 2060},
    {"epoch": 2.09, "learning_rate": 9.954541699329904e-05, "loss": 1.7924, "step": 2065},
    {"epoch": 2.09, "learning_rate": 9.913216761911755e-05, "loss": 1.7811, "step": 2070},
    {"epoch": 2.1, "learning_rate": 9.871893306599545e-05, "loss": 1.7969, "step": 2075},
    {"epoch": 2.1, "learning_rate": 9.830572039125623e-05, "loss": 1.7927, "step": 2080},
    {"epoch": 2.11, "learning_rate": 9.789253665184971e-05, "loss": 1.7776, "step": 2085},
    {"epoch": 2.11, "learning_rate": 9.747938890423163e-05, "loss": 1.7733, "step": 2090},
    {"epoch": 2.12, "learning_rate": 9.706628420424298e-05, "loss": 1.7968, "step": 2095},
    {"epoch": 2.12, "learning_rate": 9.665322960698957e-05, "loss": 1.79, "step": 2100},
    {"epoch": 2.13, "learning_rate": 9.624023216672161e-05, "loss": 1.7775, "step": 2105},
    {"epoch": 2.13, "learning_rate": 9.58272989367131e-05, "loss": 1.777, "step": 2110},
    {"epoch": 2.14, "learning_rate": 9.541443696914142e-05, "loss": 1.7659, "step": 2115},
    {"epoch": 2.14, "learning_rate": 9.500165331496705e-05, "loss": 1.792, "step": 2120},
    {"epoch": 2.15, "learning_rate": 9.458895502381284e-05, "loss": 1.7792, "step": 2125},
    {"epoch": 2.15, "learning_rate": 9.41763491438439e-05, "loss": 1.7761, "step": 2130},
    {"epoch": 2.16, "learning_rate": 9.376384272164707e-05, "loss": 1.7733, "step": 2135},
    {"epoch": 2.16, "learning_rate": 9.335144280211066e-05, "loss": 1.7803, "step": 2140},
    {"epoch": 2.17, "learning_rate": 9.293915642830407e-05, "loss": 1.7798, "step": 2145},
    {"epoch": 2.17, "learning_rate": 9.252699064135758e-05, "loss": 1.7952, "step": 2150},
    {"epoch": 2.18, "learning_rate": 9.211495248034201e-05, "loss": 1.7811, "step": 2155},
    {"epoch": 2.18, "learning_rate": 9.170304898214854e-05, "loss": 1.7879, "step": 2160},
    {"epoch": 2.19, "learning_rate": 9.129128718136867e-05, "loss": 1.7709, "step": 2165},
    {"epoch": 2.19, "learning_rate": 9.087967411017375e-05, "loss": 1.7762, "step": 2170},
    {"epoch": 2.2, "learning_rate": 9.046821679819527e-05, "loss": 1.7718, "step": 2175},
    {"epoch": 2.2, "learning_rate": 9.00569222724045e-05, "loss": 1.7775, "step": 2180},
    {"epoch": 2.21, "learning_rate": 8.964579755699271e-05, "loss": 1.78, "step": 2185},
    {"epoch": 2.21, "learning_rate": 8.923484967325104e-05, "loss": 1.7741, "step": 2190},
    {"epoch": 2.22, "learning_rate": 8.882408563945067e-05, "loss": 1.7617, "step": 2195},
    {"epoch": 2.22, "learning_rate": 8.841351247072296e-05, "loss": 1.774, "step": 2200},
    {"epoch": 2.23, "learning_rate": 8.800313717893957e-05, "loss": 1.7651, "step": 2205},
    {"epoch": 2.23, "learning_rate": 8.759296677259291e-05, "loss": 1.7654, "step": 2210},
    {"epoch": 2.24, "learning_rate": 8.718300825667611e-05, "loss": 1.7775, "step": 2215},
    {"epoch": 2.24, "learning_rate": 8.677326863256372e-05, "loss": 1.7896, "step": 2220},
    {"epoch": 2.25, "learning_rate": 8.636375489789192e-05, "loss": 1.7585, "step": 2225},
    {"epoch": 2.25, "learning_rate": 8.59544740464392e-05, "loss": 1.7678, "step": 2230},
    {"epoch": 2.26, "learning_rate": 8.554543306800668e-05, "loss": 1.759, "step": 2235},
    {"epoch": 2.26, "learning_rate": 8.513663894829898e-05, "loss": 1.7755, "step": 2240},
    {"epoch": 2.27, "learning_rate": 8.472809866880475e-05, "loss": 1.7686, "step": 2245},
    {"epoch": 2.27, "learning_rate": 8.431981920667749e-05, "loss": 1.7535, "step": 2250},
    {"epoch": 2.28, "learning_rate": 8.391180753461645e-05, "loss": 1.7704, "step": 2255},
    {"epoch": 2.28, "learning_rate": 8.350407062074739e-05, "loss": 1.774, "step": 2260},
    {"epoch": 2.29, "learning_rate": 8.309661542850375e-05, "loss": 1.7705, "step": 2265},
    {"epoch": 2.29, "learning_rate": 8.268944891650767e-05, "loss": 1.7469, "step": 2270},
    {"epoch": 2.3, "learning_rate": 8.228257803845113e-05, "loss": 1.7598, "step": 2275},
    {"epoch": 2.3, "learning_rate": 8.187600974297714e-05, "loss": 1.7683, "step": 2280},
    {"epoch": 2.31, "learning_rate": 8.146975097356117e-05, "loss": 1.7595, "step": 2285},
    {"epoch": 2.31, "learning_rate": 8.10638086683926e-05, "loss": 1.7602, "step": 2290},
    {"epoch": 2.32, "learning_rate": 8.065818976025606e-05, "loss": 1.7545, "step": 2295},
    {"epoch": 2.32, "learning_rate": 8.02529011764131e-05, "loss": 1.7785, "step": 2300},
    {"epoch": 2.33, "learning_rate": 7.984794983848399e-05, "loss": 1.7672, "step": 2305},
    {"epoch": 2.33, "learning_rate": 7.944334266232938e-05, "loss": 1.7786, "step": 2310},
    {"epoch": 2.34, "learning_rate": 7.903908655793224e-05, "loss": 1.7594, "step": 2315},
    {"epoch": 2.34, "learning_rate": 7.863518842927993e-05, "loss": 1.7517, "step": 2320},
    {"epoch": 2.35, "learning_rate": 7.823165517424609e-05, "loss": 1.7723, "step": 2325},
    {"epoch": 2.35, "learning_rate": 7.782849368447301e-05, "loss": 1.7713, "step": 2330},
    {"epoch": 2.36, "learning_rate": 7.74257108452539e-05, "loss": 1.7536, "step": 2335},
    {"epoch": 2.36, "learning_rate": 7.702331353541529e-05, "loss": 1.7412, "step": 2340},
    {"epoch": 2.37, "learning_rate": 7.662130862719944e-05, "loss": 1.7424, "step": 2345},
    {"epoch": 2.37, "learning_rate": 7.621970298614717e-05, "loss": 1.7537, "step": 2350},
    {"epoch": 2.38, "learning_rate": 7.581850347098052e-05, "loss": 1.7487, "step": 2355},
    {"epoch": 2.38, "learning_rate": 7.541771693348556e-05, "loss": 1.777, "step": 2360},
    {"epoch": 2.39, "learning_rate": 7.501735021839549e-05, "loss": 1.7651, "step": 2365},
    {"epoch": 2.39, "learning_rate": 7.461741016327359e-05, "loss": 1.7729, "step": 2370},
    {"epoch": 2.4, "learning_rate": 7.421790359839657e-05, "loss": 1.7481, "step": 2375},
    {"epoch": 2.4, "learning_rate": 7.381883734663797e-05, "loss": 1.7398, "step": 2380},
    {"epoch": 2.41, "learning_rate": 7.342021822335143e-05, "loss": 1.7416, "step": 2385},
    {"epoch": 2.41, "learning_rate": 7.302205303625444e-05, "loss": 1.7398, "step": 2390},
    {"epoch": 2.42, "learning_rate": 7.262434858531208e-05,
| "loss": 1.7459, | |
| "step": 2395 | |
| }, | |
| { | |
| "epoch": 2.42, | |
| "learning_rate": 7.222711166262089e-05, | |
| "loss": 1.7491, | |
| "step": 2400 | |
| }, | |
| { | |
| "epoch": 2.43, | |
| "learning_rate": 7.183034905229279e-05, | |
| "loss": 1.7654, | |
| "step": 2405 | |
| }, | |
| { | |
| "epoch": 2.43, | |
| "learning_rate": 7.143406753033933e-05, | |
| "loss": 1.7568, | |
| "step": 2410 | |
| }, | |
| { | |
| "epoch": 2.44, | |
| "learning_rate": 7.103827386455586e-05, | |
| "loss": 1.7451, | |
| "step": 2415 | |
| }, | |
| { | |
| "epoch": 2.44, | |
| "learning_rate": 7.0642974814406e-05, | |
| "loss": 1.7537, | |
| "step": 2420 | |
| }, | |
| { | |
| "epoch": 2.45, | |
| "learning_rate": 7.024817713090628e-05, | |
| "loss": 1.7355, | |
| "step": 2425 | |
| }, | |
| { | |
| "epoch": 2.45, | |
| "learning_rate": 6.985388755651069e-05, | |
| "loss": 1.7415, | |
| "step": 2430 | |
| }, | |
| { | |
| "epoch": 2.46, | |
| "learning_rate": 6.946011282499558e-05, | |
| "loss": 1.73, | |
| "step": 2435 | |
| }, | |
| { | |
| "epoch": 2.46, | |
| "learning_rate": 6.90668596613447e-05, | |
| "loss": 1.7344, | |
| "step": 2440 | |
| }, | |
| { | |
| "epoch": 2.47, | |
| "learning_rate": 6.867413478163445e-05, | |
| "loss": 1.7402, | |
| "step": 2445 | |
| }, | |
| { | |
| "epoch": 2.47, | |
| "learning_rate": 6.82819448929189e-05, | |
| "loss": 1.7299, | |
| "step": 2450 | |
| }, | |
| { | |
| "epoch": 2.48, | |
| "learning_rate": 6.789029669311551e-05, | |
| "loss": 1.7505, | |
| "step": 2455 | |
| }, | |
| { | |
| "epoch": 2.48, | |
| "learning_rate": 6.74991968708906e-05, | |
| "loss": 1.7365, | |
| "step": 2460 | |
| }, | |
| { | |
| "epoch": 2.49, | |
| "learning_rate": 6.710865210554518e-05, | |
| "loss": 1.7553, | |
| "step": 2465 | |
| }, | |
| { | |
| "epoch": 2.49, | |
| "learning_rate": 6.671866906690085e-05, | |
| "loss": 1.7333, | |
| "step": 2470 | |
| }, | |
| { | |
| "epoch": 2.5, | |
| "learning_rate": 6.632925441518593e-05, | |
| "loss": 1.7383, | |
| "step": 2475 | |
| }, | |
| { | |
| "epoch": 2.5, | |
| "learning_rate": 6.59404148009216e-05, | |
| "loss": 1.7512, | |
| "step": 2480 | |
| }, | |
| { | |
| "epoch": 2.51, | |
| "learning_rate": 6.555215686480847e-05, | |
| "loss": 1.7351, | |
| "step": 2485 | |
| }, | |
| { | |
| "epoch": 2.51, | |
| "learning_rate": 6.516448723761315e-05, | |
| "loss": 1.7418, | |
| "step": 2490 | |
| }, | |
| { | |
| "epoch": 2.52, | |
| "learning_rate": 6.477741254005484e-05, | |
| "loss": 1.7204, | |
| "step": 2495 | |
| }, | |
| { | |
| "epoch": 2.52, | |
| "learning_rate": 6.43909393826925e-05, | |
| "loss": 1.7271, | |
| "step": 2500 | |
| }, | |
| { | |
| "epoch": 2.53, | |
| "learning_rate": 6.400507436581178e-05, | |
| "loss": 1.7325, | |
| "step": 2505 | |
| }, | |
| { | |
| "epoch": 2.53, | |
| "learning_rate": 6.361982407931233e-05, | |
| "loss": 1.7285, | |
| "step": 2510 | |
| }, | |
| { | |
| "epoch": 2.54, | |
| "learning_rate": 6.323519510259535e-05, | |
| "loss": 1.7424, | |
| "step": 2515 | |
| }, | |
| { | |
| "epoch": 2.54, | |
| "learning_rate": 6.285119400445112e-05, | |
| "loss": 1.7481, | |
| "step": 2520 | |
| }, | |
| { | |
| "epoch": 2.55, | |
| "learning_rate": 6.246782734294683e-05, | |
| "loss": 1.7494, | |
| "step": 2525 | |
| }, | |
| { | |
| "epoch": 2.56, | |
| "learning_rate": 6.208510166531458e-05, | |
| "loss": 1.7261, | |
| "step": 2530 | |
| }, | |
| { | |
| "epoch": 2.56, | |
| "learning_rate": 6.170302350783969e-05, | |
| "loss": 1.7314, | |
| "step": 2535 | |
| }, | |
| { | |
| "epoch": 2.57, | |
| "learning_rate": 6.132159939574889e-05, | |
| "loss": 1.7328, | |
| "step": 2540 | |
| }, | |
| { | |
| "epoch": 2.57, | |
| "learning_rate": 6.094083584309893e-05, | |
| "loss": 1.747, | |
| "step": 2545 | |
| }, | |
| { | |
| "epoch": 2.58, | |
| "learning_rate": 6.056073935266547e-05, | |
| "loss": 1.7404, | |
| "step": 2550 | |
| }, | |
| { | |
| "epoch": 2.58, | |
| "learning_rate": 6.01813164158318e-05, | |
| "loss": 1.7293, | |
| "step": 2555 | |
| }, | |
| { | |
| "epoch": 2.59, | |
| "learning_rate": 5.980257351247818e-05, | |
| "loss": 1.7255, | |
| "step": 2560 | |
| }, | |
| { | |
| "epoch": 2.59, | |
| "learning_rate": 5.942451711087102e-05, | |
| "loss": 1.737, | |
| "step": 2565 | |
| }, | |
| { | |
| "epoch": 2.6, | |
| "learning_rate": 5.904715366755251e-05, | |
| "loss": 1.7361, | |
| "step": 2570 | |
| }, | |
| { | |
| "epoch": 2.6, | |
| "learning_rate": 5.867048962723029e-05, | |
| "loss": 1.7338, | |
| "step": 2575 | |
| }, | |
| { | |
| "epoch": 2.61, | |
| "learning_rate": 5.8294531422667544e-05, | |
| "loss": 1.7351, | |
| "step": 2580 | |
| }, | |
| { | |
| "epoch": 2.61, | |
| "learning_rate": 5.791928547457279e-05, | |
| "loss": 1.7371, | |
| "step": 2585 | |
| }, | |
| { | |
| "epoch": 2.62, | |
| "learning_rate": 5.754475819149072e-05, | |
| "loss": 1.7261, | |
| "step": 2590 | |
| }, | |
| { | |
| "epoch": 2.62, | |
| "learning_rate": 5.7170955969692265e-05, | |
| "loss": 1.7324, | |
| "step": 2595 | |
| }, | |
| { | |
| "epoch": 2.63, | |
| "learning_rate": 5.67978851930657e-05, | |
| "loss": 1.7284, | |
| "step": 2600 | |
| }, | |
| { | |
| "epoch": 2.63, | |
| "learning_rate": 5.6425552233007454e-05, | |
| "loss": 1.7296, | |
| "step": 2605 | |
| }, | |
| { | |
| "epoch": 2.64, | |
| "learning_rate": 5.605396344831331e-05, | |
| "loss": 1.723, | |
| "step": 2610 | |
| }, | |
| { | |
| "epoch": 2.64, | |
| "learning_rate": 5.5683125185069906e-05, | |
| "loss": 1.7224, | |
| "step": 2615 | |
| }, | |
| { | |
| "epoch": 2.65, | |
| "learning_rate": 5.531304377654623e-05, | |
| "loss": 1.717, | |
| "step": 2620 | |
| }, | |
| { | |
| "epoch": 2.65, | |
| "learning_rate": 5.4943725543085544e-05, | |
| "loss": 1.739, | |
| "step": 2625 | |
| }, | |
| { | |
| "epoch": 2.66, | |
| "learning_rate": 5.457517679199736e-05, | |
| "loss": 1.7183, | |
| "step": 2630 | |
| }, | |
| { | |
| "epoch": 2.66, | |
| "learning_rate": 5.420740381744991e-05, | |
| "loss": 1.7357, | |
| "step": 2635 | |
| }, | |
| { | |
| "epoch": 2.67, | |
| "learning_rate": 5.384041290036239e-05, | |
| "loss": 1.7197, | |
| "step": 2640 | |
| }, | |
| { | |
| "epoch": 2.67, | |
| "learning_rate": 5.3474210308297846e-05, | |
| "loss": 1.715, | |
| "step": 2645 | |
| }, | |
| { | |
| "epoch": 2.68, | |
| "learning_rate": 5.3108802295356155e-05, | |
| "loss": 1.7214, | |
| "step": 2650 | |
| }, | |
| { | |
| "epoch": 2.68, | |
| "learning_rate": 5.2744195102067127e-05, | |
| "loss": 1.7103, | |
| "step": 2655 | |
| }, | |
| { | |
| "epoch": 2.69, | |
| "learning_rate": 5.2380394955283996e-05, | |
| "loss": 1.7168, | |
| "step": 2660 | |
| }, | |
| { | |
| "epoch": 2.69, | |
| "learning_rate": 5.2017408068077064e-05, | |
| "loss": 1.7367, | |
| "step": 2665 | |
| }, | |
| { | |
| "epoch": 2.7, | |
| "learning_rate": 5.1655240639627554e-05, | |
| "loss": 1.728, | |
| "step": 2670 | |
| }, | |
| { | |
| "epoch": 2.7, | |
| "learning_rate": 5.129389885512175e-05, | |
| "loss": 1.736, | |
| "step": 2675 | |
| }, | |
| { | |
| "epoch": 2.71, | |
| "learning_rate": 5.093338888564546e-05, | |
| "loss": 1.7134, | |
| "step": 2680 | |
| }, | |
| { | |
| "epoch": 2.71, | |
| "learning_rate": 5.0573716888078494e-05, | |
| "loss": 1.7038, | |
| "step": 2685 | |
| }, | |
| { | |
| "epoch": 2.72, | |
| "learning_rate": 5.021488900498954e-05, | |
| "loss": 1.7214, | |
| "step": 2690 | |
| }, | |
| { | |
| "epoch": 2.72, | |
| "learning_rate": 4.9856911364531346e-05, | |
| "loss": 1.7163, | |
| "step": 2695 | |
| }, | |
| { | |
| "epoch": 2.73, | |
| "learning_rate": 4.949979008033596e-05, | |
| "loss": 1.7192, | |
| "step": 2700 | |
| }, | |
| { | |
| "epoch": 2.73, | |
| "learning_rate": 4.914353125141037e-05, | |
| "loss": 1.7015, | |
| "step": 2705 | |
| }, | |
| { | |
| "epoch": 2.74, | |
| "learning_rate": 4.8788140962032345e-05, | |
| "loss": 1.7358, | |
| "step": 2710 | |
| }, | |
| { | |
| "epoch": 2.74, | |
| "learning_rate": 4.8433625281646524e-05, | |
| "loss": 1.7355, | |
| "step": 2715 | |
| }, | |
| { | |
| "epoch": 2.75, | |
| "learning_rate": 4.8079990264760686e-05, | |
| "loss": 1.7125, | |
| "step": 2720 | |
| }, | |
| { | |
| "epoch": 2.75, | |
| "learning_rate": 4.7727241950842586e-05, | |
| "loss": 1.7052, | |
| "step": 2725 | |
| }, | |
| { | |
| "epoch": 2.76, | |
| "learning_rate": 4.7375386364216465e-05, | |
| "loss": 1.7299, | |
| "step": 2730 | |
| }, | |
| { | |
| "epoch": 2.76, | |
| "learning_rate": 4.7024429513960425e-05, | |
| "loss": 1.7245, | |
| "step": 2735 | |
| }, | |
| { | |
| "epoch": 2.77, | |
| "learning_rate": 4.6674377393803715e-05, | |
| "loss": 1.7031, | |
| "step": 2740 | |
| }, | |
| { | |
| "epoch": 2.77, | |
| "learning_rate": 4.632523598202435e-05, | |
| "loss": 1.7232, | |
| "step": 2745 | |
| }, | |
| { | |
| "epoch": 2.78, | |
| "learning_rate": 4.597701124134708e-05, | |
| "loss": 1.7184, | |
| "step": 2750 | |
| }, | |
| { | |
| "epoch": 2.78, | |
| "learning_rate": 4.562970911884148e-05, | |
| "loss": 1.7215, | |
| "step": 2755 | |
| }, | |
| { | |
| "epoch": 2.79, | |
| "learning_rate": 4.528333554582044e-05, | |
| "loss": 1.7049, | |
| "step": 2760 | |
| }, | |
| { | |
| "epoch": 2.79, | |
| "learning_rate": 4.493789643773881e-05, | |
| "loss": 1.7042, | |
| "step": 2765 | |
| }, | |
| { | |
| "epoch": 2.8, | |
| "learning_rate": 4.459339769409252e-05, | |
| "loss": 1.7046, | |
| "step": 2770 | |
| }, | |
| { | |
| "epoch": 2.8, | |
| "learning_rate": 4.4249845198317593e-05, | |
| "loss": 1.717, | |
| "step": 2775 | |
| }, | |
| { | |
| "epoch": 2.81, | |
| "learning_rate": 4.3907244817689875e-05, | |
| "loss": 1.7095, | |
| "step": 2780 | |
| }, | |
| { | |
| "epoch": 2.81, | |
| "learning_rate": 4.3565602403224695e-05, | |
| "loss": 1.7151, | |
| "step": 2785 | |
| }, | |
| { | |
| "epoch": 2.82, | |
| "learning_rate": 4.3224923789577045e-05, | |
| "loss": 1.6841, | |
| "step": 2790 | |
| }, | |
| { | |
| "epoch": 2.82, | |
| "learning_rate": 4.288521479494183e-05, | |
| "loss": 1.7101, | |
| "step": 2795 | |
| }, | |
| { | |
| "epoch": 2.83, | |
| "learning_rate": 4.254648122095463e-05, | |
| "loss": 1.7028, | |
| "step": 2800 | |
| }, | |
| { | |
| "epoch": 2.83, | |
| "learning_rate": 4.220872885259247e-05, | |
| "loss": 1.7006, | |
| "step": 2805 | |
| }, | |
| { | |
| "epoch": 2.84, | |
| "learning_rate": 4.187196345807511e-05, | |
| "loss": 1.717, | |
| "step": 2810 | |
| }, | |
| { | |
| "epoch": 2.84, | |
| "learning_rate": 4.153619078876664e-05, | |
| "loss": 1.7117, | |
| "step": 2815 | |
| }, | |
| { | |
| "epoch": 2.85, | |
| "learning_rate": 4.120141657907698e-05, | |
| "loss": 1.7073, | |
| "step": 2820 | |
| }, | |
| { | |
| "epoch": 2.85, | |
| "learning_rate": 4.086764654636419e-05, | |
| "loss": 1.6958, | |
| "step": 2825 | |
| }, | |
| { | |
| "epoch": 2.86, | |
| "learning_rate": 4.0534886390836715e-05, | |
| "loss": 1.7087, | |
| "step": 2830 | |
| }, | |
| { | |
| "epoch": 2.86, | |
| "learning_rate": 4.020314179545609e-05, | |
| "loss": 1.7078, | |
| "step": 2835 | |
| }, | |
| { | |
| "epoch": 2.87, | |
| "learning_rate": 3.987241842583983e-05, | |
| "loss": 1.69, | |
| "step": 2840 | |
| }, | |
| { | |
| "epoch": 2.87, | |
| "learning_rate": 3.9542721930164726e-05, | |
| "loss": 1.7025, | |
| "step": 2845 | |
| }, | |
| { | |
| "epoch": 2.88, | |
| "learning_rate": 3.9214057939070324e-05, | |
| "loss": 1.6981, | |
| "step": 2850 | |
| }, | |
| { | |
| "epoch": 2.88, | |
| "learning_rate": 3.888643206556285e-05, | |
| "loss": 1.7104, | |
| "step": 2855 | |
| }, | |
| { | |
| "epoch": 2.89, | |
| "learning_rate": 3.8559849904919196e-05, | |
| "loss": 1.7035, | |
| "step": 2860 | |
| }, | |
| { | |
| "epoch": 2.89, | |
| "learning_rate": 3.823431703459164e-05, | |
| "loss": 1.6963, | |
| "step": 2865 | |
| }, | |
| { | |
| "epoch": 2.9, | |
| "learning_rate": 3.790983901411225e-05, | |
| "loss": 1.728, | |
| "step": 2870 | |
| }, | |
| { | |
| "epoch": 2.9, | |
| "learning_rate": 3.758642138499819e-05, | |
| "loss": 1.702, | |
| "step": 2875 | |
| }, | |
| { | |
| "epoch": 2.91, | |
| "learning_rate": 3.7264069670656886e-05, | |
| "loss": 1.7027, | |
| "step": 2880 | |
| }, | |
| { | |
| "epoch": 2.91, | |
| "learning_rate": 3.694278937629197e-05, | |
| "loss": 1.7041, | |
| "step": 2885 | |
| }, | |
| { | |
| "epoch": 2.92, | |
| "learning_rate": 3.6622585988809e-05, | |
| "loss": 1.6936, | |
| "step": 2890 | |
| }, | |
| { | |
| "epoch": 2.92, | |
| "learning_rate": 3.6303464976721834e-05, | |
| "loss": 1.711, | |
| "step": 2895 | |
| }, | |
| { | |
| "epoch": 2.93, | |
| "learning_rate": 3.5985431790059276e-05, | |
| "loss": 1.6902, | |
| "step": 2900 | |
| }, | |
| { | |
| "epoch": 2.93, | |
| "learning_rate": 3.5668491860271945e-05, | |
| "loss": 1.6984, | |
| "step": 2905 | |
| }, | |
| { | |
| "epoch": 2.94, | |
| "learning_rate": 3.535265060013965e-05, | |
| "loss": 1.7125, | |
| "step": 2910 | |
| }, | |
| { | |
| "epoch": 2.94, | |
| "learning_rate": 3.50379134036787e-05, | |
| "loss": 1.7151, | |
| "step": 2915 | |
| }, | |
| { | |
| "epoch": 2.95, | |
| "learning_rate": 3.472428564605006e-05, | |
| "loss": 1.7007, | |
| "step": 2920 | |
| }, | |
| { | |
| "epoch": 2.95, | |
| "learning_rate": 3.441177268346724e-05, | |
| "loss": 1.6791, | |
| "step": 2925 | |
| }, | |
| { | |
| "epoch": 2.96, | |
| "learning_rate": 3.41003798531052e-05, | |
| "loss": 1.6978, | |
| "step": 2930 | |
| }, | |
| { | |
| "epoch": 2.96, | |
| "learning_rate": 3.379011247300889e-05, | |
| "loss": 1.6956, | |
| "step": 2935 | |
| }, | |
| { | |
| "epoch": 2.97, | |
| "learning_rate": 3.348097584200258e-05, | |
| "loss": 1.6897, | |
| "step": 2940 | |
| }, | |
| { | |
| "epoch": 2.97, | |
| "learning_rate": 3.317297523959927e-05, | |
| "loss": 1.7143, | |
| "step": 2945 | |
| }, | |
| { | |
| "epoch": 2.98, | |
| "learning_rate": 3.2866115925910615e-05, | |
| "loss": 1.6969, | |
| "step": 2950 | |
| }, | |
| { | |
| "epoch": 2.98, | |
| "learning_rate": 3.2560403141557084e-05, | |
| "loss": 1.6883, | |
| "step": 2955 | |
| }, | |
| { | |
| "epoch": 2.99, | |
| "learning_rate": 3.225584210757838e-05, | |
| "loss": 1.6873, | |
| "step": 2960 | |
| }, | |
| { | |
| "epoch": 2.99, | |
| "learning_rate": 3.195243802534438e-05, | |
| "loss": 1.701, | |
| "step": 2965 | |
| }, | |
| { | |
| "epoch": 3.0, | |
| "learning_rate": 3.165019607646611e-05, | |
| "loss": 1.6864, | |
| "step": 2970 | |
| }, | |
| { | |
| "epoch": 3.0, | |
| "eval_loss": 1.6935465335845947, | |
| "eval_runtime": 113.7484, | |
| "eval_samples_per_second": 117.092, | |
| "eval_steps_per_second": 14.638, | |
| "step": 2970 | |
| }, | |
| { | |
| "epoch": 3.01, | |
| "learning_rate": 3.134912142270757e-05, | |
| "loss": 1.9367, | |
| "step": 2975 | |
| }, | |
| { | |
| "epoch": 3.01, | |
| "learning_rate": 3.104921920589733e-05, | |
| "loss": 1.6312, | |
| "step": 2980 | |
| }, | |
| { | |
| "epoch": 3.02, | |
| "learning_rate": 3.075049454784074e-05, | |
| "loss": 1.6483, | |
| "step": 2985 | |
| }, | |
| { | |
| "epoch": 3.02, | |
| "learning_rate": 3.0452952550232562e-05, | |
| "loss": 1.6396, | |
| "step": 2990 | |
| }, | |
| { | |
| "epoch": 3.03, | |
| "learning_rate": 3.0156598294569727e-05, | |
| "loss": 1.6366, | |
| "step": 2995 | |
| }, | |
| { | |
| "epoch": 3.03, | |
| "learning_rate": 2.9861436842064727e-05, | |
| "loss": 1.642, | |
| "step": 3000 | |
| }, | |
| { | |
| "epoch": 3.04, | |
| "learning_rate": 2.956747323355895e-05, | |
| "loss": 1.6344, | |
| "step": 3005 | |
| }, | |
| { | |
| "epoch": 3.04, | |
| "learning_rate": 2.9274712489436785e-05, | |
| "loss": 1.6359, | |
| "step": 3010 | |
| }, | |
| { | |
| "epoch": 3.05, | |
| "learning_rate": 2.8983159609539635e-05, | |
| "loss": 1.6309, | |
| "step": 3015 | |
| }, | |
| { | |
| "epoch": 3.05, | |
| "learning_rate": 2.8692819573080943e-05, | |
| "loss": 1.6433, | |
| "step": 3020 | |
| }, | |
| { | |
| "epoch": 3.06, | |
| "learning_rate": 2.840369733856072e-05, | |
| "loss": 1.6478, | |
| "step": 3025 | |
| }, | |
| { | |
| "epoch": 3.06, | |
| "learning_rate": 2.8115797843681124e-05, | |
| "loss": 1.6372, | |
| "step": 3030 | |
| }, | |
| { | |
| "epoch": 3.07, | |
| "learning_rate": 2.7829126005262042e-05, | |
| "loss": 1.638, | |
| "step": 3035 | |
| }, | |
| { | |
| "epoch": 3.07, | |
| "learning_rate": 2.7543686719157102e-05, | |
| "loss": 1.6308, | |
| "step": 3040 | |
| }, | |
| { | |
| "epoch": 3.08, | |
| "learning_rate": 2.7259484860170203e-05, | |
| "loss": 1.6396, | |
| "step": 3045 | |
| }, | |
| { | |
| "epoch": 3.08, | |
| "learning_rate": 2.6976525281972078e-05, | |
| "loss": 1.6514, | |
| "step": 3050 | |
| }, | |
| { | |
| "epoch": 3.09, | |
| "learning_rate": 2.669481281701739e-05, | |
| "loss": 1.6459, | |
| "step": 3055 | |
| }, | |
| { | |
| "epoch": 3.09, | |
| "learning_rate": 2.6414352276462417e-05, | |
| "loss": 1.6357, | |
| "step": 3060 | |
| }, | |
| { | |
| "epoch": 3.1, | |
| "learning_rate": 2.613514845008275e-05, | |
| "loss": 1.6362, | |
| "step": 3065 | |
| }, | |
| { | |
| "epoch": 3.1, | |
| "learning_rate": 2.5857206106191447e-05, | |
| "loss": 1.6355, | |
| "step": 3070 | |
| }, | |
| { | |
| "epoch": 3.11, | |
| "learning_rate": 2.558052999155768e-05, | |
| "loss": 1.6385, | |
| "step": 3075 | |
| }, | |
| { | |
| "epoch": 3.11, | |
| "learning_rate": 2.530512483132561e-05, | |
| "loss": 1.6521, | |
| "step": 3080 | |
| }, | |
| { | |
| "epoch": 3.12, | |
| "learning_rate": 2.5030995328933726e-05, | |
| "loss": 1.629, | |
| "step": 3085 | |
| }, | |
| { | |
| "epoch": 3.12, | |
| "learning_rate": 2.475814616603458e-05, | |
| "loss": 1.6398, | |
| "step": 3090 | |
| }, | |
| { | |
| "epoch": 3.13, | |
| "learning_rate": 2.4486582002414716e-05, | |
| "loss": 1.6354, | |
| "step": 3095 | |
| }, | |
| { | |
| "epoch": 3.13, | |
| "learning_rate": 2.4216307475915066e-05, | |
| "loss": 1.6431, | |
| "step": 3100 | |
| }, | |
| { | |
| "epoch": 3.14, | |
| "learning_rate": 2.394732720235189e-05, | |
| "loss": 1.6334, | |
| "step": 3105 | |
| }, | |
| { | |
| "epoch": 3.14, | |
| "learning_rate": 2.367964577543792e-05, | |
| "loss": 1.6276, | |
| "step": 3110 | |
| }, | |
| { | |
| "epoch": 3.15, | |
| "learning_rate": 2.3413267766703773e-05, | |
| "loss": 1.6438, | |
| "step": 3115 | |
| }, | |
| { | |
| "epoch": 3.15, | |
| "learning_rate": 2.3148197725419983e-05, | |
| "loss": 1.6164, | |
| "step": 3120 | |
| }, | |
| { | |
| "epoch": 3.16, | |
| "learning_rate": 2.2884440178519305e-05, | |
| "loss": 1.631, | |
| "step": 3125 | |
| }, | |
| { | |
| "epoch": 3.16, | |
| "learning_rate": 2.262199963051934e-05, | |
| "loss": 1.6395, | |
| "step": 3130 | |
| }, | |
| { | |
| "epoch": 3.17, | |
| "learning_rate": 2.2360880563445764e-05, | |
| "loss": 1.6389, | |
| "step": 3135 | |
| }, | |
| { | |
| "epoch": 3.17, | |
| "learning_rate": 2.2101087436755573e-05, | |
| "loss": 1.63, | |
| "step": 3140 | |
| }, | |
| { | |
| "epoch": 3.18, | |
| "learning_rate": 2.184262468726098e-05, | |
| "loss": 1.6276, | |
| "step": 3145 | |
| }, | |
| { | |
| "epoch": 3.18, | |
| "learning_rate": 2.1585496729053768e-05, | |
| "loss": 1.6216, | |
| "step": 3150 | |
| }, | |
| { | |
| "epoch": 3.19, | |
| "learning_rate": 2.1329707953429822e-05, | |
| "loss": 1.6282, | |
| "step": 3155 | |
| }, | |
| { | |
| "epoch": 3.19, | |
| "learning_rate": 2.1075262728814094e-05, | |
| "loss": 1.6315, | |
| "step": 3160 | |
| }, | |
| { | |
| "epoch": 3.2, | |
| "learning_rate": 2.0822165400686035e-05, | |
| "loss": 1.6443, | |
| "step": 3165 | |
| }, | |
| { | |
| "epoch": 3.2, | |
| "learning_rate": 2.0570420291505376e-05, | |
| "loss": 1.6053, | |
| "step": 3170 | |
| }, | |
| { | |
| "epoch": 3.21, | |
| "learning_rate": 2.0320031700638352e-05, | |
| "loss": 1.6289, | |
| "step": 3175 | |
| }, | |
| { | |
| "epoch": 3.21, | |
| "learning_rate": 2.0071003904284158e-05, | |
| "loss": 1.6252, | |
| "step": 3180 | |
| }, | |
| { | |
| "epoch": 3.22, | |
| "learning_rate": 1.9823341155402153e-05, | |
| "loss": 1.6193, | |
| "step": 3185 | |
| }, | |
| { | |
| "epoch": 3.22, | |
| "learning_rate": 1.9577047683638873e-05, | |
| "loss": 1.6257, | |
| "step": 3190 | |
| }, | |
| { | |
| "epoch": 3.23, | |
| "learning_rate": 1.933212769525613e-05, | |
| "loss": 1.6297, | |
| "step": 3195 | |
| }, | |
| { | |
| "epoch": 3.23, | |
| "learning_rate": 1.9088585373058976e-05, | |
| "loss": 1.6248, | |
| "step": 3200 | |
| }, | |
| { | |
| "epoch": 3.24, | |
| "learning_rate": 1.884642487632442e-05, | |
| "loss": 1.6381, | |
| "step": 3205 | |
| }, | |
| { | |
| "epoch": 3.24, | |
| "learning_rate": 1.8605650340730228e-05, | |
| "loss": 1.6377, | |
| "step": 3210 | |
| }, | |
| { | |
| "epoch": 3.25, | |
| "learning_rate": 1.8366265878284382e-05, | |
| "loss": 1.6152, | |
| "step": 3215 | |
| }, | |
| { | |
| "epoch": 3.25, | |
| "learning_rate": 1.8128275577254884e-05, | |
| "loss": 1.6193, | |
| "step": 3220 | |
| }, | |
| { | |
| "epoch": 3.26, | |
| "learning_rate": 1.789168350209983e-05, | |
| "loss": 1.6174, | |
| "step": 3225 | |
| }, | |
| { | |
| "epoch": 3.26, | |
| "learning_rate": 1.765649369339819e-05, | |
| "loss": 1.6376, | |
| "step": 3230 | |
| }, | |
| { | |
| "epoch": 3.27, | |
| "learning_rate": 1.742271016778051e-05, | |
| "loss": 1.622, | |
| "step": 3235 | |
| }, | |
| { | |
| "epoch": 3.27, | |
| "learning_rate": 1.7190336917860594e-05, | |
| "loss": 1.63, | |
| "step": 3240 | |
| }, | |
| { | |
| "epoch": 3.28, | |
| "learning_rate": 1.695937791216716e-05, | |
| "loss": 1.6347, | |
| "step": 3245 | |
| }, | |
| { | |
| "epoch": 3.28, | |
| "learning_rate": 1.672983709507615e-05, | |
| "loss": 1.6179, | |
| "step": 3250 | |
| }, | |
| { | |
| "epoch": 3.29, | |
| "learning_rate": 1.650171838674328e-05, | |
| "loss": 1.6227, | |
| "step": 3255 | |
| }, | |
| { | |
| "epoch": 3.29, | |
| "learning_rate": 1.6275025683037148e-05, | |
| "loss": 1.6331, | |
| "step": 3260 | |
| }, | |
| { | |
| "epoch": 3.3, | |
| "learning_rate": 1.6049762855472685e-05, | |
| "loss": 1.6197, | |
| "step": 3265 | |
| }, | |
| { | |
| "epoch": 3.3, | |
| "learning_rate": 1.5825933751145027e-05, | |
| "loss": 1.6323, | |
| "step": 3270 | |
| }, | |
| { | |
| "epoch": 3.31, | |
| "learning_rate": 1.56035421926639e-05, | |
| "loss": 1.6208, | |
| "step": 3275 | |
| }, | |
| { | |
| "epoch": 3.31, | |
| "learning_rate": 1.5382591978088144e-05, | |
| "loss": 1.6216, | |
| "step": 3280 | |
| }, | |
| { | |
| "epoch": 3.32, | |
| "learning_rate": 1.5163086880861043e-05, | |
| "loss": 1.6305, | |
| "step": 3285 | |
| }, | |
| { | |
| "epoch": 3.32, | |
| "learning_rate": 1.494503064974575e-05, | |
| "loss": 1.6324, | |
| "step": 3290 | |
| }, | |
| { | |
| "epoch": 3.33, | |
| "learning_rate": 1.4728427008761402e-05, | |
| "loss": 1.6317, | |
| "step": 3295 | |
| }, | |
| { | |
| "epoch": 3.33, | |
| "learning_rate": 1.4513279657119371e-05, | |
| "loss": 1.6243, | |
| "step": 3300 | |
| }, | |
| { | |
| "epoch": 3.34, | |
| "learning_rate": 1.4299592269160166e-05, | |
| "loss": 1.6287, | |
| "step": 3305 | |
| }, | |
| { | |
| "epoch": 3.34, | |
| "learning_rate": 1.4087368494290677e-05, | |
| "loss": 1.6318, | |
| "step": 3310 | |
| }, | |
| { | |
| "epoch": 3.35, | |
| "learning_rate": 1.3876611956921793e-05, | |
| "loss": 1.6273, | |
| "step": 3315 | |
| }, | |
| { | |
| "epoch": 3.35, | |
| "learning_rate": 1.3667326256406699e-05, | |
| "loss": 1.6143, | |
| "step": 3320 | |
| }, | |
| { | |
| "epoch": 3.36, | |
| "learning_rate": 1.345951496697907e-05, | |
| "loss": 1.6209, | |
| "step": 3325 | |
| }, | |
| { | |
| "epoch": 3.36, | |
| "learning_rate": 1.3253181637692324e-05, | |
| "loss": 1.6212, | |
| "step": 3330 | |
| }, | |
| { | |
| "epoch": 3.37, | |
| "learning_rate": 1.3048329792358883e-05, | |
| "loss": 1.6277, | |
| "step": 3335 | |
| }, | |
| { | |
| "epoch": 3.37, | |
| "learning_rate": 1.2844962929490045e-05, | |
| "loss": 1.6418, | |
| "step": 3340 | |
| }, | |
| { | |
| "epoch": 3.38, | |
| "learning_rate": 1.264308452223616e-05, | |
| "loss": 1.6343, | |
| "step": 3345 | |
| }, | |
| { | |
| "epoch": 3.38, | |
| "learning_rate": 1.244269801832737e-05, | |
| "loss": 1.6291, | |
| "step": 3350 | |
| }, | |
| { | |
| "epoch": 3.39, | |
| "learning_rate": 1.2243806840014726e-05, | |
| "loss": 1.6088, | |
| "step": 3355 | |
| }, | |
| { | |
| "epoch": 3.39, | |
| "learning_rate": 1.2046414384011718e-05, | |
| "loss": 1.6069, | |
| "step": 3360 | |
| }, | |
| { | |
| "epoch": 3.4, | |
| "learning_rate": 1.1850524021436337e-05, | |
| "loss": 1.6121, | |
| "step": 3365 | |
| }, | |
| { | |
| "epoch": 3.4, | |
| "learning_rate": 1.1656139097753348e-05, | |
| "loss": 1.6305, | |
| "step": 3370 | |
| }, | |
| { | |
| "epoch": 3.41, | |
| "learning_rate": 1.1463262932717323e-05, | |
| "loss": 1.6394, | |
| "step": 3375 | |
| }, | |
| { | |
| "epoch": 3.41, | |
| "learning_rate": 1.127189882031584e-05, | |
| "loss": 1.6238, | |
| "step": 3380 | |
| }, | |
| { | |
| "epoch": 3.42, | |
| "learning_rate": 1.1082050028713309e-05, | |
| "loss": 1.6294, | |
| "step": 3385 | |
| }, | |
| { | |
| "epoch": 3.42, | |
| "learning_rate": 1.0893719800195057e-05, | |
| "loss": 1.622, | |
| "step": 3390 | |
| }, | |
| { | |
| "epoch": 3.43, | |
| "learning_rate": 1.0706911351112037e-05, | |
| "loss": 1.6231, | |
| "step": 3395 | |
| }, | |
| { | |
| "epoch": 3.43, | |
| "learning_rate": 1.052162787182588e-05, | |
| "loss": 1.6262, | |
| "step": 3400 | |
| }, | |
| { | |
| "epoch": 3.44, | |
| "learning_rate": 1.0337872526654358e-05, | |
| "loss": 1.6081, | |
| "step": 3405 | |
| }, | |
| { | |
| "epoch": 3.44, | |
| "learning_rate": 1.0155648453817435e-05, | |
| "loss": 1.6253, | |
| "step": 3410 | |
| }, | |
| { | |
| "epoch": 3.45, | |
| "learning_rate": 9.974958765383591e-06, | |
| "loss": 1.6118, | |
| "step": 3415 | |
| }, | |
| { | |
| "epoch": 3.45, | |
| "learning_rate": 9.795806547216701e-06, | |
| "loss": 1.6339, | |
| "step": 3420 | |
| }, | |
| { | |
| "epoch": 3.46, | |
| "learning_rate": 9.618194858923346e-06, | |
| "loss": 1.6173, | |
| "step": 3425 | |
| }, | |
| { | |
| "epoch": 3.46, | |
| "learning_rate": 9.442126733800572e-06, | |
| "loss": 1.6203, | |
| "step": 3430 | |
| }, | |
| { | |
| "epoch": 3.47, | |
| "learning_rate": 9.267605178784033e-06, | |
| "loss": 1.6263, | |
| "step": 3435 | |
| }, | |
| { | |
| "epoch": 3.47, | |
| "learning_rate": 9.094633174396694e-06, | |
| "loss": 1.6188, | |
| "step": 3440 | |
| }, | |
| { | |
| "epoch": 3.48, | |
| "learning_rate": 8.923213674697884e-06, | |
| "loss": 1.6063, | |
| "step": 3445 | |
| }, | |
| { | |
| "epoch": 3.48, | |
| "learning_rate": 8.753349607232864e-06, | |
| "loss": 1.6181, | |
| "step": 3450 | |
| }, | |
| { | |
| "epoch": 3.49, | |
| "learning_rate": 8.585043872982868e-06, | |
| "loss": 1.6265, | |
| "step": 3455 | |
| }, | |
| { | |
| "epoch": 3.49, | |
| "learning_rate": 8.418299346315483e-06, | |
| "loss": 1.6186, | |
| "step": 3460 | |
| }, | |
| { | |
| "epoch": 3.5, | |
| "learning_rate": 8.253118874935629e-06, | |
| "loss": 1.6172, | |
| "step": 3465 | |
| }, | |
| { | |
| "epoch": 3.5, | |
| "learning_rate": 8.089505279836873e-06, | |
| "loss": 1.622, | |
| "step": 3470 | |
| }, | |
| { | |
| "epoch": 3.51, | |
| "learning_rate": 7.927461355253307e-06, | |
| "loss": 1.5994, | |
| "step": 3475 | |
| }, | |
| { | |
| "epoch": 3.51, | |
| "learning_rate": 7.766989868611774e-06, | |
| "loss": 1.6239, | |
| "step": 3480 | |
| }, | |
| { | |
| "epoch": 3.52, | |
| "learning_rate": 7.60809356048463e-06, | |
| "loss": 1.6292, | |
| "step": 3485 | |
| }, | |
| { | |
| "epoch": 3.52, | |
| "learning_rate": 7.4507751445429315e-06, | |
| "loss": 1.6292, | |
| "step": 3490 | |
| }, | |
| { | |
| "epoch": 3.53, | |
| "learning_rate": 7.2950373075101e-06, | |
| "loss": 1.6154, | |
| "step": 3495 | |
| }, | |
| { | |
| "epoch": 3.53, | |
| "learning_rate": 7.140882709116048e-06, | |
| "loss": 1.6178, | |
| "step": 3500 | |
| }, | |
| { | |
| "epoch": 3.54, | |
| "learning_rate": 6.98831398205172e-06, | |
| "loss": 1.6349, | |
| "step": 3505 | |
| }, | |
| { | |
| "epoch": 3.54, | |
| "learning_rate": 6.8373337319241555e-06, | |
| "loss": 1.6319, | |
| "step": 3510 | |
| }, | |
| { | |
| "epoch": 3.55, | |
| "learning_rate": 6.687944537211988e-06, | |
| "loss": 1.6126, | |
| "step": 3515 | |
| }, | |
| { | |
| "epoch": 3.56, | |
| "learning_rate": 6.540148949221403e-06, | |
| "loss": 1.6207, | |
| "step": 3520 | |
| }, | |
| { | |
| "epoch": 3.56, | |
| "learning_rate": 6.393949492042606e-06, | |
| "loss": 1.6072, | |
| "step": 3525 | |
| }, | |
| { | |
| "epoch": 3.57, | |
| "learning_rate": 6.249348662506627e-06, | |
| "loss": 1.6298, | |
| "step": 3530 | |
| }, | |
| { | |
| "epoch": 3.57, | |
| "learning_rate": 6.106348930142758e-06, | |
| "loss": 1.6197, | |
| "step": 3535 | |
| }, | |
| { | |
| "epoch": 3.58, | |
| "learning_rate": 5.964952737136353e-06, | |
| "loss": 1.6168, | |
| "step": 3540 | |
| }, | |
| { | |
| "epoch": 3.58, | |
| "learning_rate": 5.825162498287096e-06, | |
| "loss": 1.6102, | |
| "step": 3545 | |
| }, | |
| { | |
| "epoch": 3.59, | |
| "learning_rate": 5.686980600967817e-06, | |
| "loss": 1.6184, | |
| "step": 3550 | |
| }, | |
| { | |
| "epoch": 3.59, | |
| "learning_rate": 5.550409405083657e-06, | |
| "loss": 1.6228, | |
| "step": 3555 | |
| }, | |
| { | |
| "epoch": 3.6, | |
| "learning_rate": 5.415451243031811e-06, | |
| "loss": 1.6221, | |
| "step": 3560 | |
| }, | |
| { | |
| "epoch": 3.6, | |
| "learning_rate": 5.282108419661646e-06, | |
| "loss": 1.6303, | |
| "step": 3565 | |
| }, | |
| { | |
| "epoch": 3.61, | |
| "learning_rate": 5.150383212235421e-06, | |
| "loss": 1.615, | |
| "step": 3570 | |
| }, | |
| { | |
| "epoch": 3.61, | |
| "learning_rate": 5.020277870389312e-06, | |
| "loss": 1.6333, | |
| "step": 3575 | |
| }, | |
| { | |
| "epoch": 3.62, | |
| "learning_rate": 4.891794616095025e-06, | |
| "loss": 1.6241, | |
| "step": 3580 | |
| }, | |
| { | |
| "epoch": 3.62, | |
| "learning_rate": 4.764935643621848e-06, | |
| "loss": 1.6325, | |
| "step": 3585 | |
| }, | |
| { | |
| "epoch": 3.63, | |
| "learning_rate": 4.639703119499172e-06, | |
| "loss": 1.6164, | |
| "step": 3590 | |
| }, | |
| { | |
| "epoch": 3.63, | |
| "learning_rate": 4.516099182479505e-06, | |
| "loss": 1.6207, | |
| "step": 3595 | |
| }, | |
| { | |
| "epoch": 3.64, | |
| "learning_rate": 4.394125943501936e-06, | |
| "loss": 1.6134, | |
| "step": 3600 | |
| }, | |
| { | |
| "epoch": 3.64, | |
| "learning_rate": 4.273785485656068e-06, | |
| "loss": 1.6231, | |
| "step": 3605 | |
| }, | |
| { | |
| "epoch": 3.65, | |
| "learning_rate": 4.1550798641464605e-06, | |
| "loss": 1.6069, | |
| "step": 3610 | |
| }, | |
| { | |
| "epoch": 3.65, | |
| "learning_rate": 4.038011106257555e-06, | |
| "loss": 1.6091, | |
| "step": 3615 | |
| }, | |
| { | |
| "epoch": 3.66, | |
| "learning_rate": 3.922581211318999e-06, | |
| "loss": 1.62, | |
| "step": 3620 | |
| }, | |
| { | |
| "epoch": 3.66, | |
| "learning_rate": 3.808792150671525e-06, | |
| "loss": 1.6251, | |
| "step": 3625 | |
| }, | |
| { | |
| "epoch": 3.67, | |
| "learning_rate": 3.6966458676332973e-06, | |
| "loss": 1.6327, | |
| "step": 3630 | |
| }, | |
| { | |
| "epoch": 3.67, | |
| "learning_rate": 3.5861442774667074e-06, | |
| "loss": 1.6004, | |
| "step": 3635 | |
| }, | |
| { | |
| "epoch": 3.68, | |
| "learning_rate": 3.4772892673456626e-06, | |
| "loss": 1.6086, | |
| "step": 3640 | |
| }, | |
| { | |
| "epoch": 3.68, | |
| "learning_rate": 3.3700826963233735e-06, | |
| "loss": 1.625, | |
| "step": 3645 | |
| }, | |
| { | |
| "epoch": 3.69, | |
| "learning_rate": 3.2645263953005933e-06, | |
| "loss": 1.6158, | |
| "step": 3650 | |
| }, | |
| { | |
| "epoch": 3.69, | |
| "learning_rate": 3.1606221669943206e-06, | |
| "loss": 1.608, | |
| "step": 3655 | |
| }, | |
| { | |
| "epoch": 3.7, | |
| "learning_rate": 3.0583717859070883e-06, | |
| "loss": 1.6005, | |
| "step": 3660 | |
| }, | |
| { | |
| "epoch": 3.7, | |
| "learning_rate": 2.9577769982965908e-06, | |
| "loss": 1.6166, | |
| "step": 3665 | |
| }, | |
| { | |
| "epoch": 3.71, | |
| "learning_rate": 2.858839522145873e-06, | |
| "loss": 1.6157, | |
| "step": 3670 | |
| }, | |
| { | |
| "epoch": 3.71, | |
| "learning_rate": 2.7615610471340093e-06, | |
| "loss": 1.6033, | |
| "step": 3675 | |
| }, | |
| { | |
| "epoch": 3.72, | |
| "learning_rate": 2.6659432346072156e-06, | |
| "loss": 1.6193, | |
| "step": 3680 | |
| }, | |
| { | |
| "epoch": 3.72, | |
| "learning_rate": 2.571987717550517e-06, | |
| "loss": 1.62, | |
| "step": 3685 | |
| }, | |
| { | |
| "epoch": 3.73, | |
| "learning_rate": 2.4796961005598363e-06, | |
| "loss": 1.619, | |
| "step": 3690 | |
| }, | |
| { | |
| "epoch": 3.73, | |
| "learning_rate": 2.389069959814594e-06, | |
| "loss": 1.6346, | |
| "step": 3695 | |
| }, | |
| { | |
| "epoch": 3.74, | |
| "learning_rate": 2.3001108430507735e-06, | |
| "loss": 1.6115, | |
| "step": 3700 | |
| }, | |
| { | |
| "epoch": 3.74, | |
| "learning_rate": 2.2128202695345324e-06, | |
| "loss": 1.6129, | |
| "step": 3705 | |
| }, | |
| { | |
| "epoch": 3.75, | |
| "learning_rate": 2.1271997300362003e-06, | |
| "loss": 1.5998, | |
| "step": 3710 | |
| }, | |
| { | |
| "epoch": 3.75, | |
| "learning_rate": 2.043250686804865e-06, | |
| "loss": 1.6352, | |
| "step": 3715 | |
| }, | |
| { | |
| "epoch": 3.76, | |
| "learning_rate": 1.9609745735433614e-06, | |
| "loss": 1.6213, | |
| "step": 3720 | |
| }, | |
| { | |
| "epoch": 3.76, | |
| "learning_rate": 1.8803727953838335e-06, | |
| "loss": 1.6163, | |
| "step": 3725 | |
| }, | |
| { | |
| "epoch": 3.77, | |
| "learning_rate": 1.8014467288636872e-06, | |
| "loss": 1.618, | |
| "step": 3730 | |
| }, | |
| { | |
| "epoch": 3.77, | |
| "learning_rate": 1.7241977219020988e-06, | |
| "loss": 1.6189, | |
| "step": 3735 | |
| }, | |
| { | |
| "epoch": 3.78, | |
| "learning_rate": 1.6486270937770332e-06, | |
| "loss": 1.6139, | |
| "step": 3740 | |
| }, | |
| { | |
| "epoch": 3.78, | |
| "learning_rate": 1.574736135102639e-06, | |
| "loss": 1.6163, | |
| "step": 3745 | |
| }, | |
| { | |
| "epoch": 3.79, | |
| "learning_rate": 1.5025261078073005e-06, | |
| "loss": 1.6152, | |
| "step": 3750 | |
| }, | |
| { | |
| "epoch": 3.79, | |
| "learning_rate": 1.4319982451119652e-06, | |
| "loss": 1.6289, | |
| "step": 3755 | |
| }, | |
| { | |
| "epoch": 3.8, | |
| "learning_rate": 1.3631537515092163e-06, | |
| "loss": 1.6263, | |
| "step": 3760 | |
| }, | |
| { | |
| "epoch": 3.8, | |
| "learning_rate": 1.29599380274259e-06, | |
| "loss": 1.6108, | |
| "step": 3765 | |
| }, | |
| { | |
| "epoch": 3.81, | |
| "learning_rate": 1.2305195457865681e-06, | |
| "loss": 1.6016, | |
| "step": 3770 | |
| }, | |
| { | |
| "epoch": 3.81, | |
| "learning_rate": 1.1667320988269614e-06, | |
| "loss": 1.6129, | |
| "step": 3775 | |
| }, | |
| { | |
| "epoch": 3.82, | |
| "learning_rate": 1.104632551241802e-06, | |
| "loss": 1.6075, | |
| "step": 3780 | |
| }, | |
| { | |
| "epoch": 3.82, | |
| "learning_rate": 1.0442219635827587e-06, | |
| "loss": 1.6188, | |
| "step": 3785 | |
| }, | |
| { | |
| "epoch": 3.83, | |
| "learning_rate": 9.85501367557029e-07, | |
| "loss": 1.6018, | |
| "step": 3790 | |
| }, | |
| { | |
| "epoch": 3.83, | |
| "learning_rate": 9.284717660097086e-07, | |
| "loss": 1.6072, | |
| "step": 3795 | |
| }, | |
| { | |
| "epoch": 3.84, | |
| "learning_rate": 8.7313413290665e-07, | |
| "loss": 1.6053, | |
| "step": 3800 | |
| }, | |
| { | |
| "epoch": 3.84, | |
| "learning_rate": 8.194894133178532e-07, | |
| "loss": 1.6125, | |
| "step": 3805 | |
| }, | |
| { | |
| "epoch": 3.85, | |
| "learning_rate": 7.675385234013011e-07, | |
| "loss": 1.616, | |
| "step": 3810 | |
| }, | |
| { | |
| "epoch": 3.85, | |
| "learning_rate": 7.172823503873605e-07, | |
| "loss": 1.6151, | |
| "step": 3815 | |
| }, | |
| { | |
| "epoch": 3.86, | |
| "learning_rate": 6.687217525635614e-07, | |
| "loss": 1.637, | |
| "step": 3820 | |
| }, | |
| { | |
| "epoch": 3.86, | |
| "learning_rate": 6.218575592600084e-07, | |
| "loss": 1.6135, | |
| "step": 3825 | |
| }, | |
| { | |
| "epoch": 3.87, | |
| "learning_rate": 5.766905708351478e-07, | |
| "loss": 1.6159, | |
| "step": 3830 | |
| }, | |
| { | |
| "epoch": 3.87, | |
| "learning_rate": 5.332215586621669e-07, | |
| "loss": 1.6136, | |
| "step": 3835 | |
| }, | |
| { | |
| "epoch": 3.88, | |
| "learning_rate": 4.91451265115761e-07, | |
| "loss": 1.6354, | |
| "step": 3840 | |
| }, | |
| { | |
| "epoch": 3.88, | |
| "learning_rate": 4.513804035595093e-07, | |
| "loss": 1.6142, | |
| "step": 3845 | |
| }, | |
| { | |
| "epoch": 3.89, | |
| "learning_rate": 4.130096583336407e-07, | |
| "loss": 1.6209, | |
| "step": 3850 | |
| }, | |
| { | |
| "epoch": 3.89, | |
| "learning_rate": 3.763396847433875e-07, | |
| "loss": 1.6133, | |
| "step": 3855 | |
| }, | |
| { | |
| "epoch": 3.9, | |
| "learning_rate": 3.413711090477722e-07, | |
| "loss": 1.6025, | |
| "step": 3860 | |
| }, | |
| { | |
| "epoch": 3.9, | |
| "learning_rate": 3.081045284488937e-07, | |
| "loss": 1.6299, | |
| "step": 3865 | |
| }, | |
| { | |
| "epoch": 3.91, | |
| "learning_rate": 2.76540511081802e-07, | |
| "loss": 1.624, | |
| "step": 3870 | |
| }, | |
| { | |
| "epoch": 3.91, | |
| "learning_rate": 2.4667959600468414e-07, | |
| "loss": 1.6148, | |
| "step": 3875 | |
| }, | |
| { | |
| "epoch": 3.92, | |
| "learning_rate": 2.1852229318977125e-07, | |
| "loss": 1.6089, | |
| "step": 3880 | |
| }, | |
| { | |
| "epoch": 3.92, | |
| "learning_rate": 1.9206908351453446e-07, | |
| "loss": 1.6341, | |
| "step": 3885 | |
| }, | |
| { | |
| "epoch": 3.93, | |
| "learning_rate": 1.6732041875354709e-07, | |
| "loss": 1.6184, | |
| "step": 3890 | |
| }, | |
| { | |
| "epoch": 3.93, | |
| "learning_rate": 1.4427672157072414e-07, | |
| "loss": 1.5931, | |
| "step": 3895 | |
| }, | |
| { | |
| "epoch": 3.94, | |
| "learning_rate": 1.229383855121058e-07, | |
| "loss": 1.605, | |
| "step": 3900 | |
| }, | |
| { | |
| "epoch": 3.94, | |
| "learning_rate": 1.0330577499915173e-07, | |
| "loss": 1.6359, | |
| "step": 3905 | |
| }, | |
| { | |
| "epoch": 3.95, | |
| "learning_rate": 8.537922532252384e-08, | |
| "loss": 1.5941, | |
| "step": 3910 | |
| }, | |
| { | |
| "epoch": 3.95, | |
| "learning_rate": 6.915904263634643e-08, | |
| "loss": 1.6207, | |
| "step": 3915 | |
| }, | |
| { | |
| "epoch": 3.96, | |
| "learning_rate": 5.464550395295476e-08, | |
| "loss": 1.6085, | |
| "step": 3920 | |
| }, | |
| { | |
| "epoch": 3.96, | |
| "learning_rate": 4.183885713822111e-08, | |
| "loss": 1.6145, | |
| "step": 3925 | |
| }, | |
| { | |
| "epoch": 3.97, | |
| "learning_rate": 3.0739320907269276e-08, | |
| "loss": 1.6102, | |
| "step": 3930 | |
| }, | |
| { | |
| "epoch": 3.97, | |
| "learning_rate": 2.134708482077752e-08, | |
| "loss": 1.6245, | |
| "step": 3935 | |
| }, | |
| { | |
| "epoch": 3.98, | |
| "learning_rate": 1.3662309281714525e-08, | |
| "loss": 1.6035, | |
| "step": 3940 | |
| }, | |
| { | |
| "epoch": 3.98, | |
| "learning_rate": 7.685125532586047e-09, | |
| "loss": 1.6038, | |
| "step": 3945 | |
| }, | |
| { | |
| "epoch": 3.99, | |
| "learning_rate": 3.415635653258864e-09, | |
| "loss": 1.6323, | |
| "step": 3950 | |
| }, | |
| { | |
| "epoch": 3.99, | |
| "learning_rate": 8.539125591511266e-10, | |
| "loss": 1.6162, | |
| "step": 3955 | |
| }, | |
| { | |
| "epoch": 4.0, | |
| "learning_rate": 0.0, | |
| "loss": 1.6228, | |
| "step": 3960 | |
| }, | |
| { | |
| "epoch": 4.0, | |
| "eval_loss": 1.66698157787323, | |
| "eval_runtime": 111.3625, | |
| "eval_samples_per_second": 119.6, | |
| "eval_steps_per_second": 14.951, | |
| "step": 3960 | |
| }, | |
| { | |
| "epoch": 4.0, | |
| "step": 3960, | |
| "total_flos": 9.454097668453171e+17, | |
| "train_loss": 1.9931427570304485, | |
| "train_runtime": 30194.781, | |
| "train_samples_per_second": 33.608, | |
| "train_steps_per_second": 0.131 | |
| } | |
| ], | |
| "max_steps": 3960, | |
| "num_train_epochs": 4, | |
| "total_flos": 9.454097668453171e+17, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
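
The JSON above is the tail of a Hugging Face Trainer `trainer_state.json`: `log_history` holds per-step records with `loss` and `learning_rate`, periodic entries with `eval_loss` (1.6935 at epoch 3.0, 1.6670 at epoch 4.0), and a final summary with `train_loss` and runtime stats. Below is a minimal sketch for inspecting such a file; the filename `trainer_state.json` and the matplotlib dependency are assumptions for illustration, not part of the log itself.

```python
# Minimal sketch (assumptions: the log is saved locally as
# "trainer_state.json" and matplotlib is installed).
# Loads the trainer state and plots training vs. evaluation loss by step.
import json

import matplotlib.pyplot as plt

with open("trainer_state.json") as f:
    state = json.load(f)

# Per-step training entries carry "loss"; periodic eval entries carry
# "eval_loss". The final summary entry uses "train_loss", so neither
# filter picks it up.
train = [(e["step"], e["loss"]) for e in state["log_history"] if "loss" in e]
evals = [(e["step"], e["eval_loss"]) for e in state["log_history"] if "eval_loss" in e]

plt.plot(*zip(*train), label="train loss")
plt.plot(*zip(*evals), "o", label="eval loss")
plt.xlabel("step")
plt.ylabel("loss")
plt.legend()
plt.show()
```

For this run, such a plot would show train loss drifting from roughly 1.78 near step 2110 down to about 1.61 by step 3960, with the two eval points (epochs 3.0 and 4.0) tracking slightly below the late-epoch spike at step 2975.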