{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 0.8849557522123894,
"eval_steps": 500,
"global_step": 100,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.008849557522123894,
"grad_norm": 87.84410858154297,
"learning_rate": 5e-06,
"loss": 3.4609,
"step": 1
},
{
"epoch": 0.017699115044247787,
"grad_norm": 78.43285369873047,
"learning_rate": 1e-05,
"loss": 2.2898,
"step": 2
},
{
"epoch": 0.02654867256637168,
"grad_norm": 97.12254333496094,
"learning_rate": 9.89795918367347e-06,
"loss": 1.6409,
"step": 3
},
{
"epoch": 0.035398230088495575,
"grad_norm": 33.61081314086914,
"learning_rate": 9.795918367346939e-06,
"loss": 1.7898,
"step": 4
},
{
"epoch": 0.04424778761061947,
"grad_norm": 45.12004852294922,
"learning_rate": 9.693877551020408e-06,
"loss": 1.806,
"step": 5
},
{
"epoch": 0.05309734513274336,
"grad_norm": 45.13540267944336,
"learning_rate": 9.591836734693878e-06,
"loss": 1.5003,
"step": 6
},
{
"epoch": 0.061946902654867256,
"grad_norm": 38.90739440917969,
"learning_rate": 9.489795918367348e-06,
"loss": 2.0699,
"step": 7
},
{
"epoch": 0.07079646017699115,
"grad_norm": 41.833431243896484,
"learning_rate": 9.387755102040818e-06,
"loss": 1.4504,
"step": 8
},
{
"epoch": 0.07964601769911504,
"grad_norm": 40.76021194458008,
"learning_rate": 9.285714285714288e-06,
"loss": 1.8039,
"step": 9
},
{
"epoch": 0.08849557522123894,
"grad_norm": 44.81917190551758,
"learning_rate": 9.183673469387756e-06,
"loss": 2.2376,
"step": 10
},
{
"epoch": 0.09734513274336283,
"grad_norm": 29.365863800048828,
"learning_rate": 9.081632653061225e-06,
"loss": 1.8755,
"step": 11
},
{
"epoch": 0.10619469026548672,
"grad_norm": 39.5786247253418,
"learning_rate": 8.979591836734695e-06,
"loss": 1.9546,
"step": 12
},
{
"epoch": 0.11504424778761062,
"grad_norm": 52.16230010986328,
"learning_rate": 8.877551020408163e-06,
"loss": 2.1494,
"step": 13
},
{
"epoch": 0.12389380530973451,
"grad_norm": 28.816444396972656,
"learning_rate": 8.775510204081633e-06,
"loss": 1.6266,
"step": 14
},
{
"epoch": 0.13274336283185842,
"grad_norm": 46.5515022277832,
"learning_rate": 8.673469387755103e-06,
"loss": 1.7073,
"step": 15
},
{
"epoch": 0.1415929203539823,
"grad_norm": 38.78926086425781,
"learning_rate": 8.571428571428571e-06,
"loss": 2.1972,
"step": 16
},
{
"epoch": 0.1504424778761062,
"grad_norm": 28.463777542114258,
"learning_rate": 8.469387755102042e-06,
"loss": 1.3671,
"step": 17
},
{
"epoch": 0.1592920353982301,
"grad_norm": 32.07529067993164,
"learning_rate": 8.36734693877551e-06,
"loss": 2.0852,
"step": 18
},
{
"epoch": 0.168141592920354,
"grad_norm": 68.01840209960938,
"learning_rate": 8.26530612244898e-06,
"loss": 1.8091,
"step": 19
},
{
"epoch": 0.17699115044247787,
"grad_norm": 34.860294342041016,
"learning_rate": 8.16326530612245e-06,
"loss": 1.9118,
"step": 20
},
{
"epoch": 0.18584070796460178,
"grad_norm": 29.898929595947266,
"learning_rate": 8.06122448979592e-06,
"loss": 1.7433,
"step": 21
},
{
"epoch": 0.19469026548672566,
"grad_norm": 24.96590232849121,
"learning_rate": 7.959183673469388e-06,
"loss": 2.0018,
"step": 22
},
{
"epoch": 0.20353982300884957,
"grad_norm": 31.554832458496094,
"learning_rate": 7.857142857142858e-06,
"loss": 1.9539,
"step": 23
},
{
"epoch": 0.21238938053097345,
"grad_norm": 34.30263900756836,
"learning_rate": 7.755102040816327e-06,
"loss": 1.7578,
"step": 24
},
{
"epoch": 0.22123893805309736,
"grad_norm": 29.654869079589844,
"learning_rate": 7.653061224489796e-06,
"loss": 1.7008,
"step": 25
},
{
"epoch": 0.23008849557522124,
"grad_norm": 38.87590026855469,
"learning_rate": 7.551020408163265e-06,
"loss": 2.2622,
"step": 26
},
{
"epoch": 0.23893805309734514,
"grad_norm": 66.04314422607422,
"learning_rate": 7.448979591836736e-06,
"loss": 1.9321,
"step": 27
},
{
"epoch": 0.24778761061946902,
"grad_norm": 35.426170349121094,
"learning_rate": 7.346938775510205e-06,
"loss": 1.5795,
"step": 28
},
{
"epoch": 0.25663716814159293,
"grad_norm": 42.157798767089844,
"learning_rate": 7.244897959183675e-06,
"loss": 2.7834,
"step": 29
},
{
"epoch": 0.26548672566371684,
"grad_norm": 45.37722396850586,
"learning_rate": 7.1428571428571436e-06,
"loss": 2.2652,
"step": 30
},
{
"epoch": 0.2743362831858407,
"grad_norm": 28.259496688842773,
"learning_rate": 7.0408163265306125e-06,
"loss": 2.6385,
"step": 31
},
{
"epoch": 0.2831858407079646,
"grad_norm": 26.25179672241211,
"learning_rate": 6.938775510204082e-06,
"loss": 1.7319,
"step": 32
},
{
"epoch": 0.2920353982300885,
"grad_norm": 31.81195068359375,
"learning_rate": 6.836734693877551e-06,
"loss": 1.9856,
"step": 33
},
{
"epoch": 0.3008849557522124,
"grad_norm": 19.447708129882812,
"learning_rate": 6.734693877551021e-06,
"loss": 2.3497,
"step": 34
},
{
"epoch": 0.30973451327433627,
"grad_norm": 26.500944137573242,
"learning_rate": 6.63265306122449e-06,
"loss": 1.7276,
"step": 35
},
{
"epoch": 0.3185840707964602,
"grad_norm": 78.01231384277344,
"learning_rate": 6.530612244897959e-06,
"loss": 2.0183,
"step": 36
},
{
"epoch": 0.3274336283185841,
"grad_norm": 27.982969284057617,
"learning_rate": 6.4285714285714295e-06,
"loss": 1.9115,
"step": 37
},
{
"epoch": 0.336283185840708,
"grad_norm": 31.763517379760742,
"learning_rate": 6.326530612244899e-06,
"loss": 1.9091,
"step": 38
},
{
"epoch": 0.34513274336283184,
"grad_norm": 21.773624420166016,
"learning_rate": 6.224489795918368e-06,
"loss": 2.0475,
"step": 39
},
{
"epoch": 0.35398230088495575,
"grad_norm": 20.74641227722168,
"learning_rate": 6.122448979591837e-06,
"loss": 2.0379,
"step": 40
},
{
"epoch": 0.36283185840707965,
"grad_norm": 30.289609909057617,
"learning_rate": 6.020408163265307e-06,
"loss": 1.5474,
"step": 41
},
{
"epoch": 0.37168141592920356,
"grad_norm": 33.947879791259766,
"learning_rate": 5.918367346938776e-06,
"loss": 1.3667,
"step": 42
},
{
"epoch": 0.3805309734513274,
"grad_norm": 30.62605857849121,
"learning_rate": 5.816326530612246e-06,
"loss": 1.5626,
"step": 43
},
{
"epoch": 0.3893805309734513,
"grad_norm": 40.69097137451172,
"learning_rate": 5.7142857142857145e-06,
"loss": 1.9963,
"step": 44
},
{
"epoch": 0.39823008849557523,
"grad_norm": 25.495729446411133,
"learning_rate": 5.6122448979591834e-06,
"loss": 1.4067,
"step": 45
},
{
"epoch": 0.40707964601769914,
"grad_norm": 23.296445846557617,
"learning_rate": 5.510204081632653e-06,
"loss": 1.8675,
"step": 46
},
{
"epoch": 0.415929203539823,
"grad_norm": 58.819698333740234,
"learning_rate": 5.408163265306123e-06,
"loss": 1.4896,
"step": 47
},
{
"epoch": 0.4247787610619469,
"grad_norm": 20.610706329345703,
"learning_rate": 5.306122448979593e-06,
"loss": 1.4755,
"step": 48
},
{
"epoch": 0.4336283185840708,
"grad_norm": 31.651762008666992,
"learning_rate": 5.204081632653062e-06,
"loss": 1.3298,
"step": 49
},
{
"epoch": 0.4424778761061947,
"grad_norm": 34.8726806640625,
"learning_rate": 5.1020408163265315e-06,
"loss": 1.5538,
"step": 50
},
{
"epoch": 0.45132743362831856,
"grad_norm": 30.710643768310547,
"learning_rate": 5e-06,
"loss": 1.565,
"step": 51
},
{
"epoch": 0.46017699115044247,
"grad_norm": 38.70994186401367,
"learning_rate": 4.897959183673469e-06,
"loss": 2.5572,
"step": 52
},
{
"epoch": 0.4690265486725664,
"grad_norm": 29.8387508392334,
"learning_rate": 4.795918367346939e-06,
"loss": 1.876,
"step": 53
},
{
"epoch": 0.4778761061946903,
"grad_norm": 27.06145668029785,
"learning_rate": 4.693877551020409e-06,
"loss": 1.8598,
"step": 54
},
{
"epoch": 0.48672566371681414,
"grad_norm": 28.992870330810547,
"learning_rate": 4.591836734693878e-06,
"loss": 1.9706,
"step": 55
},
{
"epoch": 0.49557522123893805,
"grad_norm": 46.222930908203125,
"learning_rate": 4.489795918367348e-06,
"loss": 1.7333,
"step": 56
},
{
"epoch": 0.504424778761062,
"grad_norm": 26.87865447998047,
"learning_rate": 4.3877551020408165e-06,
"loss": 1.3999,
"step": 57
},
{
"epoch": 0.5132743362831859,
"grad_norm": 37.33481216430664,
"learning_rate": 4.2857142857142855e-06,
"loss": 2.0258,
"step": 58
},
{
"epoch": 0.5221238938053098,
"grad_norm": 35.500083923339844,
"learning_rate": 4.183673469387755e-06,
"loss": 1.5114,
"step": 59
},
{
"epoch": 0.5309734513274337,
"grad_norm": 19.6195125579834,
"learning_rate": 4.081632653061225e-06,
"loss": 1.7526,
"step": 60
},
{
"epoch": 0.5398230088495575,
"grad_norm": 23.47956657409668,
"learning_rate": 3.979591836734694e-06,
"loss": 1.5198,
"step": 61
},
{
"epoch": 0.5486725663716814,
"grad_norm": 33.108558654785156,
"learning_rate": 3.877551020408164e-06,
"loss": 1.9014,
"step": 62
},
{
"epoch": 0.5575221238938053,
"grad_norm": 38.44852828979492,
"learning_rate": 3.7755102040816327e-06,
"loss": 1.7959,
"step": 63
},
{
"epoch": 0.5663716814159292,
"grad_norm": 26.128610610961914,
"learning_rate": 3.6734693877551024e-06,
"loss": 1.2207,
"step": 64
},
{
"epoch": 0.5752212389380531,
"grad_norm": 21.97479248046875,
"learning_rate": 3.5714285714285718e-06,
"loss": 1.8857,
"step": 65
},
{
"epoch": 0.584070796460177,
"grad_norm": 33.13182830810547,
"learning_rate": 3.469387755102041e-06,
"loss": 1.8683,
"step": 66
},
{
"epoch": 0.5929203539823009,
"grad_norm": 17.072967529296875,
"learning_rate": 3.3673469387755105e-06,
"loss": 1.7174,
"step": 67
},
{
"epoch": 0.6017699115044248,
"grad_norm": 38.326087951660156,
"learning_rate": 3.2653061224489794e-06,
"loss": 1.5377,
"step": 68
},
{
"epoch": 0.6106194690265486,
"grad_norm": 32.88009262084961,
"learning_rate": 3.1632653061224496e-06,
"loss": 1.2692,
"step": 69
},
{
"epoch": 0.6194690265486725,
"grad_norm": 19.286846160888672,
"learning_rate": 3.0612244897959185e-06,
"loss": 1.8252,
"step": 70
},
{
"epoch": 0.6283185840707964,
"grad_norm": 21.519533157348633,
"learning_rate": 2.959183673469388e-06,
"loss": 1.8557,
"step": 71
},
{
"epoch": 0.6371681415929203,
"grad_norm": 19.520902633666992,
"learning_rate": 2.8571428571428573e-06,
"loss": 1.704,
"step": 72
},
{
"epoch": 0.6460176991150443,
"grad_norm": 38.84403991699219,
"learning_rate": 2.7551020408163266e-06,
"loss": 1.1003,
"step": 73
},
{
"epoch": 0.6548672566371682,
"grad_norm": 24.24687957763672,
"learning_rate": 2.6530612244897964e-06,
"loss": 1.6043,
"step": 74
},
{
"epoch": 0.6637168141592921,
"grad_norm": 31.633848190307617,
"learning_rate": 2.5510204081632657e-06,
"loss": 2.1552,
"step": 75
},
{
"epoch": 0.672566371681416,
"grad_norm": 26.500728607177734,
"learning_rate": 2.4489795918367347e-06,
"loss": 1.7407,
"step": 76
},
{
"epoch": 0.6814159292035398,
"grad_norm": 23.343164443969727,
"learning_rate": 2.3469387755102044e-06,
"loss": 1.0979,
"step": 77
},
{
"epoch": 0.6902654867256637,
"grad_norm": 21.490922927856445,
"learning_rate": 2.244897959183674e-06,
"loss": 1.4029,
"step": 78
},
{
"epoch": 0.6991150442477876,
"grad_norm": 26.402719497680664,
"learning_rate": 2.1428571428571427e-06,
"loss": 2.0435,
"step": 79
},
{
"epoch": 0.7079646017699115,
"grad_norm": 35.96997833251953,
"learning_rate": 2.0408163265306125e-06,
"loss": 2.2912,
"step": 80
},
{
"epoch": 0.7168141592920354,
"grad_norm": 31.210407257080078,
"learning_rate": 1.938775510204082e-06,
"loss": 1.9319,
"step": 81
},
{
"epoch": 0.7256637168141593,
"grad_norm": 29.655807495117188,
"learning_rate": 1.8367346938775512e-06,
"loss": 1.7095,
"step": 82
},
{
"epoch": 0.7345132743362832,
"grad_norm": 31.187416076660156,
"learning_rate": 1.7346938775510206e-06,
"loss": 1.5105,
"step": 83
},
{
"epoch": 0.7433628318584071,
"grad_norm": 24.92572593688965,
"learning_rate": 1.6326530612244897e-06,
"loss": 1.6805,
"step": 84
},
{
"epoch": 0.7522123893805309,
"grad_norm": 20.41127586364746,
"learning_rate": 1.5306122448979593e-06,
"loss": 1.9271,
"step": 85
},
{
"epoch": 0.7610619469026548,
"grad_norm": 29.233976364135742,
"learning_rate": 1.4285714285714286e-06,
"loss": 1.7253,
"step": 86
},
{
"epoch": 0.7699115044247787,
"grad_norm": 16.428743362426758,
"learning_rate": 1.3265306122448982e-06,
"loss": 1.5462,
"step": 87
},
{
"epoch": 0.7787610619469026,
"grad_norm": 31.367034912109375,
"learning_rate": 1.2244897959183673e-06,
"loss": 1.2827,
"step": 88
},
{
"epoch": 0.7876106194690266,
"grad_norm": 24.767841339111328,
"learning_rate": 1.122448979591837e-06,
"loss": 1.5834,
"step": 89
},
{
"epoch": 0.7964601769911505,
"grad_norm": 23.918731689453125,
"learning_rate": 1.0204081632653063e-06,
"loss": 1.5573,
"step": 90
},
{
"epoch": 0.8053097345132744,
"grad_norm": 24.393335342407227,
"learning_rate": 9.183673469387756e-07,
"loss": 1.2877,
"step": 91
},
{
"epoch": 0.8141592920353983,
"grad_norm": 29.03656768798828,
"learning_rate": 8.163265306122449e-07,
"loss": 1.6835,
"step": 92
},
{
"epoch": 0.8230088495575221,
"grad_norm": 23.890060424804688,
"learning_rate": 7.142857142857143e-07,
"loss": 1.4466,
"step": 93
},
{
"epoch": 0.831858407079646,
"grad_norm": 33.64008331298828,
"learning_rate": 6.122448979591837e-07,
"loss": 1.7625,
"step": 94
},
{
"epoch": 0.8407079646017699,
"grad_norm": 26.439125061035156,
"learning_rate": 5.102040816326531e-07,
"loss": 1.3713,
"step": 95
},
{
"epoch": 0.8495575221238938,
"grad_norm": 22.46098518371582,
"learning_rate": 4.0816326530612243e-07,
"loss": 1.5705,
"step": 96
},
{
"epoch": 0.8584070796460177,
"grad_norm": 21.508468627929688,
"learning_rate": 3.0612244897959183e-07,
"loss": 1.8408,
"step": 97
},
{
"epoch": 0.8672566371681416,
"grad_norm": 25.66583824157715,
"learning_rate": 2.0408163265306121e-07,
"loss": 1.3645,
"step": 98
},
{
"epoch": 0.8761061946902655,
"grad_norm": 21.1544132232666,
"learning_rate": 1.0204081632653061e-07,
"loss": 1.1687,
"step": 99
},
{
"epoch": 0.8849557522123894,
"grad_norm": 22.30592155456543,
"learning_rate": 0.0,
"loss": 1.1249,
"step": 100
},
{
"epoch": 0.8849557522123894,
"step": 100,
"total_flos": 2305515375820800.0,
"train_loss": 1.7753095757961272,
"train_runtime": 5725.6045,
"train_samples_per_second": 0.017,
"train_steps_per_second": 0.017
}
],
"logging_steps": 1,
"max_steps": 100,
"num_input_tokens_seen": 0,
"num_train_epochs": 1,
"save_steps": 500,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 2305515375820800.0,
"train_batch_size": 1,
"trial_name": null,
"trial_params": null
}