{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.963109354413702,
  "eval_steps": 500,
  "global_step": 376,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01,
      "grad_norm": 476.0,
      "learning_rate": 5.263157894736842e-06,
      "loss": 13.5076,
      "step": 1
    },
    {
      "epoch": 0.05,
      "grad_norm": 27.125,
      "learning_rate": 2.6315789473684212e-05,
      "loss": 10.3382,
      "step": 5
    },
    {
      "epoch": 0.11,
      "grad_norm": 7.09375,
      "learning_rate": 5.2631578947368424e-05,
      "loss": 6.5621,
      "step": 10
    },
    {
      "epoch": 0.16,
      "grad_norm": 5.09375,
      "learning_rate": 7.894736842105263e-05,
      "loss": 5.4048,
      "step": 15
    },
    {
      "epoch": 0.21,
      "grad_norm": 13.625,
      "learning_rate": 0.00010526315789473685,
      "loss": 3.8714,
      "step": 20
    },
    {
      "epoch": 0.26,
      "grad_norm": 4.3125,
      "learning_rate": 0.00013157894736842108,
      "loss": 1.907,
      "step": 25
    },
    {
      "epoch": 0.32,
      "grad_norm": 2.484375,
      "learning_rate": 0.00015789473684210527,
      "loss": 1.7156,
      "step": 30
    },
    {
      "epoch": 0.37,
      "grad_norm": 1.71875,
      "learning_rate": 0.00018421052631578948,
      "loss": 1.5905,
      "step": 35
    },
    {
      "epoch": 0.42,
      "grad_norm": 1.4375,
      "learning_rate": 0.00019998272238333606,
      "loss": 1.4996,
      "step": 40
    },
    {
      "epoch": 0.47,
      "grad_norm": 1.2109375,
      "learning_rate": 0.00019978841775475367,
      "loss": 1.424,
      "step": 45
    },
    {
      "epoch": 0.53,
      "grad_norm": 2.515625,
      "learning_rate": 0.00019937863245275304,
      "loss": 1.3526,
      "step": 50
    },
    {
      "epoch": 0.58,
      "grad_norm": 1.6953125,
      "learning_rate": 0.0001987542513577122,
      "loss": 1.3334,
      "step": 55
    },
    {
      "epoch": 0.63,
      "grad_norm": 2.3125,
      "learning_rate": 0.00019791662274292637,
      "loss": 1.2863,
      "step": 60
    },
    {
      "epoch": 0.69,
      "grad_norm": 0.91015625,
      "learning_rate": 0.00019686755536317945,
      "loss": 1.2384,
      "step": 65
    },
    {
      "epoch": 0.74,
      "grad_norm": 0.99609375,
      "learning_rate": 0.00019560931454896298,
      "loss": 1.2248,
      "step": 70
    },
    {
      "epoch": 0.79,
      "grad_norm": 0.73046875,
      "learning_rate": 0.000194144617314777,
      "loss": 1.1772,
      "step": 75
    },
    {
      "epoch": 0.84,
      "grad_norm": 0.58203125,
      "learning_rate": 0.0001924766264920751,
      "loss": 1.1654,
      "step": 80
    },
    {
      "epoch": 0.9,
      "grad_norm": 0.74609375,
      "learning_rate": 0.00019060894389952328,
      "loss": 1.1382,
      "step": 85
    },
    {
      "epoch": 0.95,
      "grad_norm": 0.78515625,
      "learning_rate": 0.000188545602565321,
      "loss": 1.1537,
      "step": 90
    },
    {
      "epoch": 0.99,
      "eval_loss": 1.0988351106643677,
      "eval_runtime": 16.9422,
      "eval_samples_per_second": 16.822,
      "eval_steps_per_second": 2.125,
      "step": 94
    },
    {
      "epoch": 1.0,
      "grad_norm": 0.65625,
      "learning_rate": 0.00018629105801837818,
      "loss": 1.1162,
      "step": 95
    },
    {
      "epoch": 1.05,
      "grad_norm": 0.84375,
      "learning_rate": 0.00018385017866715507,
      "loss": 0.9838,
      "step": 100
    },
    {
      "epoch": 1.11,
      "grad_norm": 0.61328125,
      "learning_rate": 0.00018122823528693966,
      "loss": 0.9895,
      "step": 105
    },
    {
      "epoch": 1.16,
      "grad_norm": 0.8359375,
      "learning_rate": 0.00017843088963826435,
      "loss": 0.9588,
      "step": 110
    },
    {
      "epoch": 1.21,
      "grad_norm": 0.74609375,
      "learning_rate": 0.00017546418224103838,
      "loss": 0.9304,
      "step": 115
    },
    {
      "epoch": 1.26,
      "grad_norm": 0.63671875,
      "learning_rate": 0.00017233451933079664,
      "loss": 0.9315,
      "step": 120
    },
    {
      "epoch": 1.32,
      "grad_norm": 0.53125,
      "learning_rate": 0.00016904865902523096,
      "loss": 0.9869,
      "step": 125
    },
    {
      "epoch": 1.37,
      "grad_norm": 0.796875,
      "learning_rate": 0.00016561369673087588,
      "loss": 0.9399,
      "step": 130
    },
    {
      "epoch": 1.42,
      "grad_norm": 0.75390625,
      "learning_rate": 0.00016203704982146073,
      "loss": 0.9061,
      "step": 135
    },
    {
      "epoch": 1.48,
      "grad_norm": 2.296875,
      "learning_rate": 0.00015832644162101417,
      "loss": 0.9537,
      "step": 140
    },
    {
      "epoch": 1.53,
      "grad_norm": 0.6875,
      "learning_rate": 0.00015448988472630654,
      "loss": 0.9107,
      "step": 145
    },
    {
      "epoch": 1.58,
      "grad_norm": 0.61328125,
      "learning_rate": 0.00015053566370464415,
      "loss": 0.9483,
      "step": 150
    },
    {
      "epoch": 1.63,
      "grad_norm": 0.7265625,
      "learning_rate": 0.00014647231720437686,
      "loss": 0.9075,
      "step": 155
    },
    {
      "epoch": 1.69,
      "grad_norm": 0.53515625,
      "learning_rate": 0.00014230861951674913,
      "loss": 0.9148,
      "step": 160
    },
    {
      "epoch": 1.74,
      "grad_norm": 0.51171875,
      "learning_rate": 0.0001380535616289099,
      "loss": 0.9142,
      "step": 165
    },
    {
      "epoch": 1.79,
      "grad_norm": 0.5703125,
      "learning_rate": 0.00013371633180899416,
      "loss": 0.8966,
      "step": 170
    },
    {
      "epoch": 1.84,
      "grad_norm": 0.53515625,
      "learning_rate": 0.00012930629576520132,
      "loss": 0.8795,
      "step": 175
    },
    {
      "epoch": 1.9,
      "grad_norm": 0.609375,
      "learning_rate": 0.00012483297642171333,
      "loss": 0.8548,
      "step": 180
    },
    {
      "epoch": 1.95,
      "grad_norm": 0.53515625,
      "learning_rate": 0.00012030603335512468,
      "loss": 0.9028,
      "step": 185
    },
    {
      "epoch": 1.99,
      "eval_loss": 0.8055516481399536,
      "eval_runtime": 16.4906,
      "eval_samples_per_second": 17.283,
      "eval_steps_per_second": 2.183,
      "step": 189
    },
    {
      "epoch": 2.0,
      "grad_norm": 0.58203125,
      "learning_rate": 0.00011573524193578863,
      "loss": 0.8422,
      "step": 190
    },
    {
      "epoch": 2.06,
      "grad_norm": 0.57421875,
      "learning_rate": 0.00011113047221912096,
      "loss": 0.7173,
      "step": 195
    },
    {
      "epoch": 2.11,
      "grad_norm": 0.52734375,
      "learning_rate": 0.0001065016676324433,
      "loss": 0.6956,
      "step": 200
    },
    {
      "epoch": 2.16,
      "grad_norm": 0.53125,
      "learning_rate": 0.0001018588235033888,
      "loss": 0.6894,
      "step": 205
    },
    {
      "epoch": 2.21,
      "grad_norm": 0.54296875,
      "learning_rate": 9.721196547623584e-05,
      "loss": 0.6804,
      "step": 210
    },
    {
      "epoch": 2.27,
      "grad_norm": 0.4765625,
      "learning_rate": 9.257112786277631e-05,
      "loss": 0.6327,
      "step": 215
    },
    {
      "epoch": 2.32,
      "grad_norm": 0.51953125,
      "learning_rate": 8.79463319744677e-05,
      "loss": 0.637,
      "step": 220
    },
    {
      "epoch": 2.37,
      "grad_norm": 0.53125,
      "learning_rate": 8.334756448265781e-05,
      "loss": 0.6765,
      "step": 225
    },
    {
      "epoch": 2.42,
      "grad_norm": 0.51953125,
      "learning_rate": 7.878475585361045e-05,
      "loss": 0.6518,
      "step": 230
    },
    {
      "epoch": 2.48,
      "grad_norm": 0.5390625,
      "learning_rate": 7.42677589048989e-05,
      "loss": 0.6535,
      "step": 235
    },
    {
      "epoch": 2.53,
      "grad_norm": 0.5078125,
      "learning_rate": 6.98063275294722e-05,
      "loss": 0.6311,
      "step": 240
    },
    {
      "epoch": 2.58,
      "grad_norm": 0.52734375,
      "learning_rate": 6.54100956333369e-05,
      "loss": 0.6438,
      "step": 245
    },
    {
      "epoch": 2.64,
      "grad_norm": 0.55859375,
      "learning_rate": 6.108855633233546e-05,
      "loss": 0.6366,
      "step": 250
    },
    {
      "epoch": 2.69,
      "grad_norm": 0.51171875,
      "learning_rate": 5.6851041452943646e-05,
      "loss": 0.627,
      "step": 255
    },
    {
      "epoch": 2.74,
      "grad_norm": 0.498046875,
      "learning_rate": 5.270670138135234e-05,
      "loss": 0.6359,
      "step": 260
    },
    {
      "epoch": 2.79,
      "grad_norm": 0.49609375,
      "learning_rate": 4.866448530434692e-05,
      "loss": 0.6727,
      "step": 265
    },
    {
      "epoch": 2.85,
      "grad_norm": 0.49609375,
      "learning_rate": 4.4733121884651664e-05,
      "loss": 0.6406,
      "step": 270
    },
    {
      "epoch": 2.9,
      "grad_norm": 0.54296875,
      "learning_rate": 4.092110041246865e-05,
      "loss": 0.6535,
      "step": 275
    },
    {
      "epoch": 2.95,
      "grad_norm": 0.48046875,
      "learning_rate": 3.7236652473911814e-05,
      "loss": 0.6553,
      "step": 280
    },
    {
      "epoch": 2.99,
      "eval_loss": 0.6577403545379639,
      "eval_runtime": 16.6273,
      "eval_samples_per_second": 17.141,
      "eval_steps_per_second": 2.165,
      "step": 284
    },
    {
      "epoch": 3.0,
      "grad_norm": 0.578125,
      "learning_rate": 3.36877341759205e-05,
      "loss": 0.6292,
      "step": 285
    },
    {
      "epoch": 3.06,
      "grad_norm": 0.7421875,
      "learning_rate": 3.0282008966036646e-05,
      "loss": 0.4973,
      "step": 290
    },
    {
      "epoch": 3.11,
      "grad_norm": 0.54296875,
      "learning_rate": 2.7026831084143255e-05,
      "loss": 0.4967,
      "step": 295
    },
    {
      "epoch": 3.16,
      "grad_norm": 0.5234375,
      "learning_rate": 2.3929229681898003e-05,
      "loss": 0.5102,
      "step": 300
    },
    {
      "epoch": 3.21,
      "grad_norm": 0.50390625,
      "learning_rate": 2.0995893644155008e-05,
      "loss": 0.4894,
      "step": 305
    },
    {
      "epoch": 3.27,
      "grad_norm": 0.515625,
      "learning_rate": 1.823315714515018e-05,
      "loss": 0.5026,
      "step": 310
    },
    {
      "epoch": 3.32,
      "grad_norm": 0.49609375,
      "learning_rate": 1.5646985970639717e-05,
      "loss": 0.4853,
      "step": 315
    },
    {
      "epoch": 3.37,
      "grad_norm": 0.50390625,
      "learning_rate": 1.324296463552821e-05,
      "loss": 0.483,
      "step": 320
    },
    {
      "epoch": 3.43,
      "grad_norm": 0.4921875,
      "learning_rate": 1.1026284324803494e-05,
      "loss": 0.5077,
      "step": 325
    },
    {
      "epoch": 3.48,
      "grad_norm": 0.51953125,
      "learning_rate": 9.001731683818337e-06,
      "loss": 0.5099,
      "step": 330
    },
    {
      "epoch": 3.53,
      "grad_norm": 0.50390625,
      "learning_rate": 7.17367848212539e-06,
      "loss": 0.4787,
      "step": 335
    },
    {
      "epoch": 3.58,
      "grad_norm": 0.5078125,
      "learning_rate": 5.546072173184791e-06,
      "loss": 0.4859,
      "step": 340
    },
    {
      "epoch": 3.64,
      "grad_norm": 0.5,
      "learning_rate": 4.1224273703294514e-06,
      "loss": 0.5011,
      "step": 345
    },
    {
      "epoch": 3.69,
      "grad_norm": 0.5078125,
      "learning_rate": 2.905818257394799e-06,
      "loss": 0.5044,
      "step": 350
    },
    {
      "epoch": 3.74,
      "grad_norm": 0.5078125,
      "learning_rate": 1.8988719504013374e-06,
      "loss": 0.4728,
      "step": 355
    },
    {
      "epoch": 3.79,
      "grad_norm": 0.51171875,
      "learning_rate": 1.103762824624377e-06,
      "loss": 0.5035,
      "step": 360
    },
    {
      "epoch": 3.85,
      "grad_norm": 0.498046875,
      "learning_rate": 5.222078193011126e-07,
      "loss": 0.4899,
      "step": 365
    },
    {
      "epoch": 3.9,
      "grad_norm": 0.50390625,
      "learning_rate": 1.554627301140199e-07,
      "loss": 0.4906,
      "step": 370
    },
    {
      "epoch": 3.95,
      "grad_norm": 0.4921875,
      "learning_rate": 4.319497456273247e-09,
      "loss": 0.4936,
      "step": 375
    },
    {
      "epoch": 3.96,
      "eval_loss": 0.6385390162467957,
      "eval_runtime": 16.4691,
      "eval_samples_per_second": 17.305,
      "eval_steps_per_second": 2.186,
      "step": 376
    },
    {
      "epoch": 3.96,
      "step": 376,
      "total_flos": 2.3511765822965023e+18,
      "train_loss": 1.1451922758938151,
      "train_runtime": 4455.6354,
      "train_samples_per_second": 5.447,
      "train_steps_per_second": 0.084
    }
  ],
  "logging_steps": 5,
  "max_steps": 376,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 4,
  "save_steps": 500,
  "total_flos": 2.3511765822965023e+18,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}