{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.039255671679837,
  "eval_steps": 500,
  "global_step": 8000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.02549069589599796,
      "grad_norm": 2598.015380859375,
      "learning_rate": 2.5e-06,
      "loss": 223.1421,
      "step": 100
    },
    {
      "epoch": 0.05098139179199592,
      "grad_norm": 296.58673095703125,
      "learning_rate": 5e-06,
      "loss": 103.1948,
      "step": 200
    },
    {
      "epoch": 0.07647208768799388,
      "grad_norm": 1187.214111328125,
      "learning_rate": 7.500000000000001e-06,
      "loss": 76.4616,
      "step": 300
    },
    {
      "epoch": 0.10196278358399184,
      "grad_norm": 1285.8101806640625,
      "learning_rate": 1e-05,
      "loss": 75.5666,
      "step": 400
    },
    {
      "epoch": 0.1274534794799898,
      "grad_norm": 661.5672607421875,
      "learning_rate": 9.995728791936505e-06,
      "loss": 64.9031,
      "step": 500
    },
    {
      "epoch": 0.15294417537598776,
      "grad_norm": 371.0738220214844,
      "learning_rate": 9.98292246503335e-06,
      "loss": 65.2197,
      "step": 600
    },
    {
      "epoch": 0.17843487127198573,
      "grad_norm": 403.7045593261719,
      "learning_rate": 9.961602898685225e-06,
      "loss": 69.9518,
      "step": 700
    },
    {
      "epoch": 0.20392556716798368,
      "grad_norm": 431.2552490234375,
      "learning_rate": 9.931806517013612e-06,
      "loss": 63.1313,
      "step": 800
    },
    {
      "epoch": 0.22941626306398163,
      "grad_norm": 324.2401123046875,
      "learning_rate": 9.893584226636773e-06,
      "loss": 63.1745,
      "step": 900
    },
    {
      "epoch": 0.2549069589599796,
      "grad_norm": 527.2928466796875,
      "learning_rate": 9.847001329696653e-06,
      "loss": 63.3554,
      "step": 1000
    },
    {
      "epoch": 0.28039765485597756,
      "grad_norm": 311.36553955078125,
      "learning_rate": 9.792137412291265e-06,
      "loss": 64.9263,
      "step": 1100
    },
    {
      "epoch": 0.3058883507519755,
      "grad_norm": 1101.414794921875,
      "learning_rate": 9.729086208503174e-06,
      "loss": 66.4159,
      "step": 1200
    },
    {
      "epoch": 0.33137904664797346,
      "grad_norm": 276.3797302246094,
      "learning_rate": 9.657955440256396e-06,
      "loss": 60.7144,
      "step": 1300
    },
    {
      "epoch": 0.35686974254397147,
      "grad_norm": 403.9580383300781,
      "learning_rate": 9.578866633275289e-06,
      "loss": 66.2698,
      "step": 1400
    },
    {
      "epoch": 0.3823604384399694,
      "grad_norm": 224.3489532470703,
      "learning_rate": 9.491954909459895e-06,
      "loss": 59.2869,
      "step": 1500
    },
    {
      "epoch": 0.40785113433596737,
      "grad_norm": 315.9915771484375,
      "learning_rate": 9.397368756032445e-06,
      "loss": 59.4493,
      "step": 1600
    },
    {
      "epoch": 0.4333418302319653,
      "grad_norm": 286.55816650390625,
      "learning_rate": 9.295269771849426e-06,
      "loss": 60.4814,
      "step": 1700
    },
    {
      "epoch": 0.45883252612796327,
      "grad_norm": 323.67498779296875,
      "learning_rate": 9.185832391312644e-06,
      "loss": 58.4063,
      "step": 1800
    },
    {
      "epoch": 0.4843232220239613,
      "grad_norm": 375.53961181640625,
      "learning_rate": 9.069243586350976e-06,
      "loss": 57.9836,
      "step": 1900
    },
    {
      "epoch": 0.5098139179199592,
      "grad_norm": 191.06105041503906,
      "learning_rate": 8.94570254698197e-06,
      "loss": 59.9295,
      "step": 2000
    },
    {
      "epoch": 0.5353046138159572,
      "grad_norm": 233.01463317871094,
      "learning_rate": 8.815420340999034e-06,
      "loss": 56.8298,
      "step": 2100
    },
    {
      "epoch": 0.5607953097119551,
      "grad_norm": 277.080078125,
      "learning_rate": 8.67861955336566e-06,
      "loss": 58.9617,
      "step": 2200
    },
    {
      "epoch": 0.5862860056079531,
      "grad_norm": 492.3611145019531,
      "learning_rate": 8.535533905932739e-06,
      "loss": 62.3042,
      "step": 2300
    },
    {
      "epoch": 0.611776701503951,
      "grad_norm": 252.20265197753906,
      "learning_rate": 8.386407858128707e-06,
      "loss": 56.0719,
      "step": 2400
    },
    {
      "epoch": 0.637267397399949,
      "grad_norm": 355.2184753417969,
      "learning_rate": 8.231496189304704e-06,
      "loss": 55.5271,
      "step": 2500
    },
    {
      "epoch": 0.6627580932959469,
      "grad_norm": 468.17657470703125,
      "learning_rate": 8.071063563448341e-06,
      "loss": 59.2408,
      "step": 2600
    },
    {
      "epoch": 0.688248789191945,
      "grad_norm": 542.2239990234375,
      "learning_rate": 7.905384077009693e-06,
      "loss": 52.7214,
      "step": 2700
    },
    {
      "epoch": 0.7137394850879429,
      "grad_norm": 538.4288330078125,
      "learning_rate": 7.734740790612137e-06,
      "loss": 59.0205,
      "step": 2800
    },
    {
      "epoch": 0.7392301809839409,
      "grad_norm": 411.6714172363281,
      "learning_rate": 7.559425245448006e-06,
      "loss": 60.3362,
      "step": 2900
    },
    {
      "epoch": 0.7647208768799388,
      "grad_norm": 310.0589904785156,
      "learning_rate": 7.379736965185369e-06,
      "loss": 52.7548,
      "step": 3000
    },
    {
      "epoch": 0.7902115727759368,
      "grad_norm": 224.0525665283203,
      "learning_rate": 7.195982944236853e-06,
      "loss": 49.1827,
      "step": 3100
    },
    {
      "epoch": 0.8157022686719347,
      "grad_norm": 255.24600219726562,
      "learning_rate": 7.008477123264849e-06,
      "loss": 58.0803,
      "step": 3200
    },
    {
      "epoch": 0.8411929645679327,
      "grad_norm": 490.8277282714844,
      "learning_rate": 6.817539852819149e-06,
      "loss": 59.3456,
      "step": 3300
    },
    {
      "epoch": 0.8666836604639306,
      "grad_norm": 1017.12353515625,
      "learning_rate": 6.6234973460234184e-06,
      "loss": 52.9023,
      "step": 3400
    },
    {
      "epoch": 0.8921743563599286,
      "grad_norm": 243.35829162597656,
      "learning_rate": 6.426681121245527e-06,
      "loss": 55.5555,
      "step": 3500
    },
    {
      "epoch": 0.9176650522559265,
      "grad_norm": 629.7377319335938,
      "learning_rate": 6.227427435703997e-06,
      "loss": 53.5047,
      "step": 3600
    },
    {
      "epoch": 0.9431557481519246,
      "grad_norm": 511.2954406738281,
      "learning_rate": 6.026076710978172e-06,
      "loss": 51.0799,
      "step": 3700
    },
    {
      "epoch": 0.9686464440479226,
      "grad_norm": 360.78289794921875,
      "learning_rate": 5.82297295140367e-06,
      "loss": 54.3857,
      "step": 3800
    },
    {
      "epoch": 0.9941371399439205,
      "grad_norm": 400.1549987792969,
      "learning_rate": 5.61846315634674e-06,
      "loss": 51.9264,
      "step": 3900
    },
    {
      "epoch": 1.0,
      "eval_loss": 3177.868896484375,
      "eval_runtime": 10.4771,
      "eval_samples_per_second": 332.821,
      "eval_steps_per_second": 41.615,
      "step": 3923
    },
    {
      "epoch": 1.0196278358399185,
      "grad_norm": 331.4139099121094,
      "learning_rate": 5.412896727361663e-06,
      "loss": 53.4844,
      "step": 4000
    },
    {
      "epoch": 1.0451185317359164,
      "grad_norm": 368.0908203125,
      "learning_rate": 5.206624871244066e-06,
      "loss": 58.5974,
      "step": 4100
    },
    {
      "epoch": 1.0706092276319144,
      "grad_norm": 436.7818603515625,
      "learning_rate": 5e-06,
      "loss": 52.2036,
      "step": 4200
    },
    {
      "epoch": 1.0960999235279123,
      "grad_norm": 370.1662902832031,
      "learning_rate": 4.793375128755934e-06,
      "loss": 57.4233,
      "step": 4300
    },
    {
      "epoch": 1.1215906194239103,
      "grad_norm": 947.5311889648438,
      "learning_rate": 4.587103272638339e-06,
      "loss": 54.2669,
      "step": 4400
    },
    {
      "epoch": 1.1470813153199082,
      "grad_norm": 219.2557830810547,
      "learning_rate": 4.381536843653262e-06,
      "loss": 52.8739,
      "step": 4500
    },
    {
      "epoch": 1.1725720112159062,
      "grad_norm": 219.25851440429688,
      "learning_rate": 4.17702704859633e-06,
      "loss": 49.8615,
      "step": 4600
    },
    {
      "epoch": 1.198062707111904,
      "grad_norm": 268.1202087402344,
      "learning_rate": 3.973923289021829e-06,
      "loss": 52.5382,
      "step": 4700
    },
    {
      "epoch": 1.223553403007902,
      "grad_norm": 300.58795166015625,
      "learning_rate": 3.7725725642960047e-06,
      "loss": 56.45,
      "step": 4800
    },
    {
      "epoch": 1.2490440989039,
      "grad_norm": 596.579833984375,
      "learning_rate": 3.573318878754475e-06,
      "loss": 50.8593,
      "step": 4900
    },
    {
      "epoch": 1.274534794799898,
      "grad_norm": 313.270751953125,
      "learning_rate": 3.3765026539765832e-06,
      "loss": 58.2715,
      "step": 5000
    },
    {
      "epoch": 1.3000254906958961,
      "grad_norm": 755.581298828125,
      "learning_rate": 3.1824601471808504e-06,
      "loss": 54.2177,
      "step": 5100
    },
    {
      "epoch": 1.3255161865918939,
      "grad_norm": 184.0687713623047,
      "learning_rate": 2.991522876735154e-06,
      "loss": 49.5836,
      "step": 5200
    },
    {
      "epoch": 1.351006882487892,
      "grad_norm": 264.9710693359375,
      "learning_rate": 2.804017055763149e-06,
      "loss": 48.2091,
      "step": 5300
    },
    {
      "epoch": 1.3764975783838898,
      "grad_norm": 447.72174072265625,
      "learning_rate": 2.6202630348146323e-06,
      "loss": 47.0714,
      "step": 5400
    },
    {
      "epoch": 1.401988274279888,
      "grad_norm": 397.1292419433594,
      "learning_rate": 2.4405747545519966e-06,
      "loss": 50.4874,
      "step": 5500
    },
    {
      "epoch": 1.4274789701758859,
      "grad_norm": 407.992431640625,
      "learning_rate": 2.265259209387867e-06,
      "loss": 48.07,
      "step": 5600
    },
    {
      "epoch": 1.4529696660718838,
      "grad_norm": 370.75030517578125,
      "learning_rate": 2.094615922990309e-06,
      "loss": 52.0392,
      "step": 5700
    },
    {
      "epoch": 1.4784603619678818,
      "grad_norm": 201.114013671875,
      "learning_rate": 1.928936436551661e-06,
      "loss": 52.9536,
      "step": 5800
    },
    {
      "epoch": 1.5039510578638797,
      "grad_norm": 288.3627624511719,
      "learning_rate": 1.7685038106952952e-06,
      "loss": 51.5337,
      "step": 5900
    },
    {
      "epoch": 1.5294417537598777,
      "grad_norm": 529.8703002929688,
      "learning_rate": 1.6135921418712959e-06,
      "loss": 53.8545,
      "step": 6000
    },
    {
      "epoch": 1.5549324496558756,
      "grad_norm": 266.9444885253906,
      "learning_rate": 1.4644660940672628e-06,
      "loss": 44.9202,
      "step": 6100
    },
    {
      "epoch": 1.5804231455518736,
      "grad_norm": 481.2966613769531,
      "learning_rate": 1.321380446634342e-06,
      "loss": 48.9966,
      "step": 6200
    },
    {
      "epoch": 1.6059138414478715,
      "grad_norm": 225.82679748535156,
      "learning_rate": 1.1845796590009684e-06,
      "loss": 52.9983,
      "step": 6300
    },
    {
      "epoch": 1.6314045373438695,
      "grad_norm": 269.080810546875,
      "learning_rate": 1.0542974530180327e-06,
      "loss": 50.8469,
      "step": 6400
    },
    {
      "epoch": 1.6568952332398674,
      "grad_norm": 181.23312377929688,
      "learning_rate": 9.307564136490255e-07,
      "loss": 51.3111,
      "step": 6500
    },
    {
      "epoch": 1.6823859291358654,
      "grad_norm": 288.37408447265625,
      "learning_rate": 8.141676086873574e-07,
      "loss": 51.2199,
      "step": 6600
    },
    {
      "epoch": 1.7078766250318633,
      "grad_norm": 295.11163330078125,
      "learning_rate": 7.047302281505735e-07,
      "loss": 55.1311,
      "step": 6700
    },
    {
      "epoch": 1.7333673209278613,
      "grad_norm": 606.4634399414062,
      "learning_rate": 6.026312439675553e-07,
      "loss": 50.0297,
      "step": 6800
    },
    {
      "epoch": 1.7588580168238592,
      "grad_norm": 400.3865661621094,
      "learning_rate": 5.080450905401057e-07,
      "loss": 50.0489,
      "step": 6900
    },
    {
      "epoch": 1.7843487127198574,
      "grad_norm": 709.5574340820312,
      "learning_rate": 4.211333667247125e-07,
      "loss": 55.2417,
      "step": 7000
    },
    {
      "epoch": 1.8098394086158551,
      "grad_norm": 219.62893676757812,
      "learning_rate": 3.420445597436056e-07,
      "loss": 49.912,
      "step": 7100
    },
    {
      "epoch": 1.8353301045118533,
      "grad_norm": 568.156494140625,
      "learning_rate": 2.7091379149682683e-07,
      "loss": 54.6138,
      "step": 7200
    },
    {
      "epoch": 1.860820800407851,
      "grad_norm": 825.6641845703125,
      "learning_rate": 2.0786258770873647e-07,
      "loss": 50.6318,
      "step": 7300
    },
    {
      "epoch": 1.8863114963038492,
      "grad_norm": 344.0815124511719,
      "learning_rate": 1.5299867030334815e-07,
      "loss": 51.9353,
      "step": 7400
    },
    {
      "epoch": 1.911802192199847,
      "grad_norm": 291.8919677734375,
      "learning_rate": 1.0641577336322761e-07,
      "loss": 47.0073,
      "step": 7500
    },
    {
      "epoch": 1.937292888095845,
      "grad_norm": 362.12451171875,
      "learning_rate": 6.819348298638839e-08,
      "loss": 53.6565,
      "step": 7600
    },
    {
      "epoch": 1.9627835839918428,
      "grad_norm": 313.963623046875,
      "learning_rate": 3.839710131477492e-08,
      "loss": 52.6762,
      "step": 7700
    },
    {
      "epoch": 1.988274279887841,
      "grad_norm": 187.05747985839844,
      "learning_rate": 1.7077534966650767e-08,
      "loss": 53.921,
      "step": 7800
    },
    {
      "epoch": 2.0,
      "eval_loss": 2829.332275390625,
      "eval_runtime": 10.5528,
      "eval_samples_per_second": 330.433,
      "eval_steps_per_second": 41.316,
      "step": 7846
    },
    {
      "epoch": 2.0137649757838387,
      "grad_norm": 539.2352294921875,
      "learning_rate": 4.2712080634949024e-09,
      "loss": 52.3559,
      "step": 7900
    },
    {
      "epoch": 2.039255671679837,
      "grad_norm": 650.2083129882812,
      "learning_rate": 0.0,
      "loss": 54.1146,
      "step": 8000
    }
  ],
  "logging_steps": 100,
  "max_steps": 8000,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 2000,
  "total_flos": 0.0,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}
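The log above is a `trainer_state.json` as written by a Hugging Face `transformers.Trainer` checkpoint: training loss falls from roughly 223 at step 100 to the low 50s by step 8000, `eval_loss` improves from about 3178 (epoch 1) to about 2829 (epoch 2), and the learning-rate trace is consistent with a ~400-step warmup to a 1e-05 peak followed by cosine decay to zero at `max_steps`. Below is a minimal sketch of how such a file can be inspected after training; the file path and the plotting choices are assumptions, not part of the log. Note that the eval entries in this log sit on a much larger scale than the running training loss, so the sketch puts them on a secondary axis.

```python
import json

import matplotlib.pyplot as plt

# Path is an assumption: trainer_state.json as saved by transformers.Trainer
# inside a checkpoint directory (e.g. checkpoint-8000/trainer_state.json).
with open("trainer_state.json") as f:
    state = json.load(f)

# Training entries carry "loss"; end-of-epoch evaluation entries carry "eval_loss".
train = [e for e in state["log_history"] if "loss" in e]
evals = [e for e in state["log_history"] if "eval_loss" in e]

fig, ax = plt.subplots()
ax.plot([e["step"] for e in train],
        [e["loss"] for e in train],
        color="tab:blue", label="train loss")
ax.set_xlabel("step")
ax.set_ylabel("train loss", color="tab:blue")

# eval_loss in this log (~3000) is on a different scale than the running
# train loss (~50-220), so plot it on a twin y-axis.
ax2 = ax.twinx()
ax2.scatter([e["step"] for e in evals],
            [e["eval_loss"] for e in evals],
            color="tab:red", label="eval loss")
ax2.set_ylabel("eval loss", color="tab:red")

fig.tight_layout()
plt.show()
```

The same split (entries with `loss` vs. entries with `eval_loss`) also works for spotting the gradient-norm spikes visible in this run (e.g. ~2598 at step 100 and ~1286 at step 400), which often motivate gradient clipping or a longer warmup.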