Training in progress, step 450, checkpoint
last-checkpoint/adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:f11485f5cbbc15f2efcd043cf1d070aa52205b3a3971c67b4b875bf0813f0b91
 size 576742168
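adapter_model.safetensors holds the adapter weights saved with this checkpoint. The file name and ~577 MB size suggest a parameter-efficient (PEFT/LoRA-style) adapter rather than full model weights, although the training configuration is not part of this diff. A minimal loading sketch, assuming it really is a PEFT adapter; "BASE_MODEL_ID" is a placeholder, since the base model is not named in this commit:

from transformers import AutoModelForCausalLM
from peft import PeftModel

# Placeholder id; the base model is not identified anywhere in this diff.
base = AutoModelForCausalLM.from_pretrained("BASE_MODEL_ID")
# Point PEFT at the checkpoint directory containing adapter_model.safetensors
# (assumes the usual adapter_config.json was saved alongside it).
model = PeftModel.from_pretrained(base, "last-checkpoint")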
last-checkpoint/optimizer.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:825f5f0a1c93b523626d0d1e137991efc31a556cee74e0e374b39774fe5ac610
 size 1153583051
last-checkpoint/scheduler.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:80a28d6040ef389fa05d3c0e2f7ac82202cded02881b02cd3d97be8e9e11710d
 size 1337
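The three binaries above are tracked through Git LFS, so the repository only versions pointer files in the version / oid sha256 / size format shown in these diffs. A small sketch, assuming the real files have been fetched locally (for example with git lfs pull or the Hub client), that checks a downloaded file against its pointer:

import hashlib
from pathlib import Path

def sha256_of(path: str, chunk_size: int = 1 << 20) -> str:
    """Stream the file so a large checkpoint never has to fit in memory."""
    digest = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            digest.update(chunk)
    return digest.hexdigest()

# Values taken from the adapter_model.safetensors pointer in this commit.
expected_oid = "f11485f5cbbc15f2efcd043cf1d070aa52205b3a3971c67b4b875bf0813f0b91"
expected_size = 576742168

path = Path("last-checkpoint/adapter_model.safetensors")  # local clone path (assumption)
assert path.stat().st_size == expected_size, "size does not match the LFS pointer"
assert sha256_of(str(path)) == expected_oid, "sha256 does not match the LFS pointer"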
last-checkpoint/trainer_state.json CHANGED
@@ -2,9 +2,9 @@
   "best_global_step": null,
   "best_metric": null,
   "best_model_checkpoint": null,
-  "epoch": 0.
+  "epoch": 0.0862115415701277,
   "eval_steps": 15,
-  "global_step":
+  "global_step": 450,
   "is_hyper_param_search": false,
   "is_local_process_zero": true,
   "is_world_process_zero": true,
@@ -2633,6 +2633,531 @@
       "learning_rate": 4.073126408628244e-05,
       "loss": 0.1824,
       "step": 375
+    },
+    {
+      "epoch": 0.07203453251192893,
+      "grad_norm": 2.2367873191833496,
+      "learning_rate": 4.056141901817364e-05,
+      "loss": 0.1926,
+      "step": 376
+    },
+    {
+      "epoch": 0.0722261137154181,
+      "grad_norm": 4.916191577911377,
+      "learning_rate": 4.0388638729412704e-05,
+      "loss": 0.1989,
+      "step": 377
+    },
+    {
+      "epoch": 0.07241769491890727,
+      "grad_norm": 2.901930570602417,
+      "learning_rate": 4.021295209854704e-05,
+      "loss": 0.1897,
+      "step": 378
+    },
+    {
+      "epoch": 0.07260927612239644,
+      "grad_norm": 3.3210787773132324,
+      "learning_rate": 4.0034388489890915e-05,
+      "loss": 0.1969,
+      "step": 379
+    },
+    {
+      "epoch": 0.07280085732588562,
+      "grad_norm": 2.4721968173980713,
+      "learning_rate": 3.9852977748617514e-05,
+      "loss": 0.1936,
+      "step": 380
+    },
+    {
+      "epoch": 0.07299243852937479,
+      "grad_norm": 8.125286102294922,
+      "learning_rate": 3.96687501957706e-05,
+      "loss": 0.184,
+      "step": 381
+    },
+    {
+      "epoch": 0.07318401973286395,
+      "grad_norm": 4.848060131072998,
+      "learning_rate": 3.9481736623196593e-05,
+      "loss": 0.1931,
+      "step": 382
+    },
+    {
+      "epoch": 0.07337560093635313,
+      "grad_norm": 7.407880783081055,
+      "learning_rate": 3.9291968288398105e-05,
+      "loss": 0.1852,
+      "step": 383
+    },
+    {
+      "epoch": 0.0735671821398423,
+      "grad_norm": 5.8476033210754395,
+      "learning_rate": 3.9099476909309474e-05,
+      "loss": 0.1944,
+      "step": 384
+    },
+    {
+      "epoch": 0.07375876334333148,
+      "grad_norm": 2.9378533363342285,
+      "learning_rate": 3.8904294658995444e-05,
+      "loss": 0.198,
+      "step": 385
+    },
+    {
+      "epoch": 0.07395034454682065,
+      "grad_norm": 5.6641387939453125,
+      "learning_rate": 3.8706454160273786e-05,
+      "loss": 0.1998,
+      "step": 386
+    },
+    {
+      "epoch": 0.07414192575030983,
+      "grad_norm": 3.131520986557007,
+      "learning_rate": 3.850598848026264e-05,
+      "loss": 0.194,
+      "step": 387
+    },
+    {
+      "epoch": 0.07433350695379899,
+      "grad_norm": 7.007782936096191,
+      "learning_rate": 3.830293112485372e-05,
+      "loss": 0.1963,
+      "step": 388
+    },
+    {
+      "epoch": 0.07452508815728817,
+      "grad_norm": 3.0689289569854736,
+      "learning_rate": 3.809731603311215e-05,
+      "loss": 0.1996,
+      "step": 389
+    },
+    {
+      "epoch": 0.07471666936077734,
+      "grad_norm": 9.517902374267578,
+      "learning_rate": 3.7889177571603766e-05,
+      "loss": 0.1986,
+      "step": 390
+    },
+    {
+      "epoch": 0.07490825056426652,
+      "grad_norm": 5.704578399658203,
+      "learning_rate": 3.767855052865123e-05,
+      "loss": 0.1938,
+      "step": 391
+    },
+    {
+      "epoch": 0.07509983176775568,
+      "grad_norm": 2.295260429382324,
+      "learning_rate": 3.74654701085194e-05,
+      "loss": 0.1909,
+      "step": 392
+    },
+    {
+      "epoch": 0.07529141297124486,
+      "grad_norm": 3.591259241104126,
+      "learning_rate": 3.72499719255313e-05,
+      "loss": 0.1988,
+      "step": 393
+    },
+    {
+      "epoch": 0.07548299417473403,
+      "grad_norm": 3.5636987686157227,
+      "learning_rate": 3.7032091998115533e-05,
+      "loss": 0.1995,
+      "step": 394
+    },
+    {
+      "epoch": 0.07567457537822321,
+      "grad_norm": 4.469659805297852,
+      "learning_rate": 3.6811866742786176e-05,
+      "loss": 0.1879,
+      "step": 395
+    },
+    {
+      "epoch": 0.07586615658171238,
+      "grad_norm": 3.9006998538970947,
+      "learning_rate": 3.658933296805606e-05,
+      "loss": 0.1907,
+      "step": 396
+    },
+    {
+      "epoch": 0.07605773778520154,
+      "grad_norm": 24.030759811401367,
+      "learning_rate": 3.636452786828464e-05,
+      "loss": 0.1977,
+      "step": 397
+    },
+    {
+      "epoch": 0.07624931898869072,
+      "grad_norm": 5.7307868003845215,
+      "learning_rate": 3.61374890174613e-05,
+      "loss": 0.2058,
+      "step": 398
+    },
+    {
+      "epoch": 0.07644090019217989,
+      "grad_norm": 1.7607786655426025,
+      "learning_rate": 3.5908254362925195e-05,
+      "loss": 0.1965,
+      "step": 399
+    },
+    {
+      "epoch": 0.07663248139566907,
+      "grad_norm": 2.303826093673706,
+      "learning_rate": 3.5676862219022764e-05,
+      "loss": 0.1959,
+      "step": 400
+    },
+    {
+      "epoch": 0.07682406259915824,
+      "grad_norm": 2.2567644119262695,
+      "learning_rate": 3.544335126070385e-05,
+      "loss": 0.1915,
+      "step": 401
+    },
+    {
+      "epoch": 0.07701564380264742,
+      "grad_norm": 2.066506862640381,
+      "learning_rate": 3.5207760517057536e-05,
+      "loss": 0.1964,
+      "step": 402
+    },
+    {
+      "epoch": 0.07720722500613658,
+      "grad_norm": 3.4691412448883057,
+      "learning_rate": 3.497012936478882e-05,
+      "loss": 0.1851,
+      "step": 403
+    },
+    {
+      "epoch": 0.07739880620962576,
+      "grad_norm": 2.622666358947754,
+      "learning_rate": 3.473049752163721e-05,
+      "loss": 0.191,
+      "step": 404
+    },
+    {
+      "epoch": 0.07759038741311493,
+      "grad_norm": 10.22749137878418,
+      "learning_rate": 3.448890503973825e-05,
+      "loss": 0.1921,
+      "step": 405
+    },
+    {
+      "epoch": 0.07778196861660411,
+      "grad_norm": 13.989953994750977,
+      "learning_rate": 3.424539229892921e-05,
+      "loss": 0.201,
+      "step": 406
+    },
+    {
+      "epoch": 0.07797354982009327,
+      "grad_norm": 4.6682329177856445,
+      "learning_rate": 3.4e-05,
+      "loss": 0.1975,
+      "step": 407
+    },
+    {
+      "epoch": 0.07816513102358245,
+      "grad_norm": 1.8833575248718262,
+      "learning_rate": 3.375276915789041e-05,
+      "loss": 0.1914,
+      "step": 408
+    },
+    {
+      "epoch": 0.07835671222707162,
+      "grad_norm": 2.786548137664795,
+      "learning_rate": 3.350374109483488e-05,
+      "loss": 0.193,
+      "step": 409
+    },
+    {
+      "epoch": 0.0785482934305608,
+      "grad_norm": 2.270221471786499,
+      "learning_rate": 3.325295743345586e-05,
+      "loss": 0.1931,
+      "step": 410
+    },
+    {
+      "epoch": 0.07873987463404997,
+      "grad_norm": 2.640393018722534,
+      "learning_rate": 3.3000460089807015e-05,
+      "loss": 0.189,
+      "step": 411
+    },
+    {
+      "epoch": 0.07893145583753913,
+      "grad_norm": 1.4697917699813843,
+      "learning_rate": 3.2746291266367376e-05,
+      "loss": 0.195,
+      "step": 412
+    },
+    {
+      "epoch": 0.07912303704102831,
+      "grad_norm": 2.071248769760132,
+      "learning_rate": 3.249049344498758e-05,
+      "loss": 0.1852,
+      "step": 413
+    },
+    {
+      "epoch": 0.07931461824451748,
+      "grad_norm": 2.1426753997802734,
+      "learning_rate": 3.223310937978944e-05,
+      "loss": 0.191,
+      "step": 414
+    },
+    {
+      "epoch": 0.07950619944800666,
+      "grad_norm": 5.456600189208984,
+      "learning_rate": 3.197418209002004e-05,
+      "loss": 0.1904,
+      "step": 415
+    },
+    {
+      "epoch": 0.07969778065149583,
+      "grad_norm": 3.2432124614715576,
+      "learning_rate": 3.171375485286145e-05,
+      "loss": 0.1801,
+      "step": 416
+    },
+    {
+      "epoch": 0.079889361854985,
+      "grad_norm": 5.8231682777404785,
+      "learning_rate": 3.145187119619739e-05,
+      "loss": 0.1928,
+      "step": 417
+    },
+    {
+      "epoch": 0.08008094305847417,
+      "grad_norm": 4.832012176513672,
+      "learning_rate": 3.1188574891337944e-05,
+      "loss": 0.1974,
+      "step": 418
+    },
+    {
+      "epoch": 0.08027252426196335,
+      "grad_norm": 3.440028429031372,
+      "learning_rate": 3.0923909945703634e-05,
+      "loss": 0.202,
+      "step": 419
+    },
+    {
+      "epoch": 0.08046410546545252,
+      "grad_norm": 2.6871285438537598,
+      "learning_rate": 3.0657920595469994e-05,
+      "loss": 0.1868,
+      "step": 420
+    },
+    {
+      "epoch": 0.0806556866689417,
+      "grad_norm": 2.170921802520752,
+      "learning_rate": 3.0390651298173934e-05,
+      "loss": 0.1936,
+      "step": 421
+    },
+    {
+      "epoch": 0.08084726787243086,
+      "grad_norm": 3.358898639678955,
+      "learning_rate": 3.012214672528307e-05,
+      "loss": 0.1838,
+      "step": 422
+    },
+    {
+      "epoch": 0.08103884907592004,
+      "grad_norm": 2.4218928813934326,
+      "learning_rate": 2.9852451754729346e-05,
+      "loss": 0.183,
+      "step": 423
+    },
+    {
+      "epoch": 0.08123043027940921,
+      "grad_norm": 3.2345471382141113,
+      "learning_rate": 2.9581611463408125e-05,
+      "loss": 0.1898,
+      "step": 424
+    },
+    {
+      "epoch": 0.08142201148289839,
+      "grad_norm": 2.3063583374023438,
+      "learning_rate": 2.9309671119643992e-05,
+      "loss": 0.1872,
+      "step": 425
+    },
+    {
+      "epoch": 0.08161359268638756,
+      "grad_norm": 1.7139233350753784,
+      "learning_rate": 2.903667617562464e-05,
+      "loss": 0.1882,
+      "step": 426
+    },
+    {
+      "epoch": 0.08180517388987672,
+      "grad_norm": 3.8402225971221924,
+      "learning_rate": 2.8762672259803936e-05,
+      "loss": 0.1802,
+      "step": 427
+    },
+    {
+      "epoch": 0.0819967550933659,
+      "grad_norm": 2.057905912399292,
+      "learning_rate": 2.8487705169275593e-05,
+      "loss": 0.1769,
+      "step": 428
+    },
+    {
+      "epoch": 0.08218833629685507,
+      "grad_norm": 1.2963840961456299,
+      "learning_rate": 2.8211820862118622e-05,
+      "loss": 0.1965,
+      "step": 429
+    },
+    {
+      "epoch": 0.08237991750034425,
+      "grad_norm": 2.3987321853637695,
+      "learning_rate": 2.793506544971589e-05,
+      "loss": 0.1883,
+      "step": 430
+    },
+    {
+      "epoch": 0.08257149870383342,
+      "grad_norm": 2.685065984725952,
+      "learning_rate": 2.7657485189047044e-05,
+      "loss": 0.1901,
+      "step": 431
+    },
+    {
+      "epoch": 0.0827630799073226,
+      "grad_norm": 2.0090572834014893,
+      "learning_rate": 2.7379126474957098e-05,
+      "loss": 0.1771,
+      "step": 432
+    },
+    {
+      "epoch": 0.08295466111081176,
+      "grad_norm": 2.611567735671997,
+      "learning_rate": 2.7100035832402016e-05,
+      "loss": 0.1846,
+      "step": 433
+    },
+    {
+      "epoch": 0.08314624231430094,
+      "grad_norm": 3.6181955337524414,
+      "learning_rate": 2.6820259908672476e-05,
+      "loss": 0.1807,
+      "step": 434
+    },
+    {
+      "epoch": 0.08333782351779011,
+      "grad_norm": 2.1794869899749756,
+      "learning_rate": 2.6539845465597266e-05,
+      "loss": 0.189,
+      "step": 435
+    },
+    {
+      "epoch": 0.08352940472127929,
+      "grad_norm": 2.181123971939087,
+      "learning_rate": 2.6258839371727483e-05,
+      "loss": 0.195,
+      "step": 436
+    },
+    {
+      "epoch": 0.08372098592476845,
+      "grad_norm": 2.4915378093719482,
+      "learning_rate": 2.597728859450289e-05,
+      "loss": 0.1657,
+      "step": 437
+    },
+    {
+      "epoch": 0.08391256712825763,
+      "grad_norm": 1.9425824880599976,
+      "learning_rate": 2.5695240192401803e-05,
+      "loss": 0.1857,
+      "step": 438
+    },
+    {
+      "epoch": 0.0841041483317468,
+      "grad_norm": 2.0749311447143555,
+      "learning_rate": 2.5412741307075702e-05,
+      "loss": 0.1926,
+      "step": 439
+    },
+    {
+      "epoch": 0.08429572953523598,
+      "grad_norm": 2.1632518768310547,
+      "learning_rate": 2.5129839155469944e-05,
+      "loss": 0.1842,
+      "step": 440
+    },
+    {
+      "epoch": 0.08448731073872515,
+      "grad_norm": 1.8725292682647705,
+      "learning_rate": 2.484658102193195e-05,
+      "loss": 0.1885,
+      "step": 441
+    },
+    {
+      "epoch": 0.08467889194221431,
+      "grad_norm": 1.6631460189819336,
+      "learning_rate": 2.456301425030807e-05,
+      "loss": 0.1941,
+      "step": 442
+    },
+    {
+      "epoch": 0.08487047314570349,
+      "grad_norm": 2.8142271041870117,
+      "learning_rate": 2.4279186236030472e-05,
+      "loss": 0.1796,
+      "step": 443
+    },
+    {
+      "epoch": 0.08506205434919266,
+      "grad_norm": 1.826132893562317,
+      "learning_rate": 2.3995144418195545e-05,
+      "loss": 0.1903,
+      "step": 444
+    },
+    {
+      "epoch": 0.08525363555268184,
+      "grad_norm": 2.832087755203247,
+      "learning_rate": 2.371093627163484e-05,
+      "loss": 0.1904,
+      "step": 445
+    },
+    {
+      "epoch": 0.085445216756171,
+      "grad_norm": 1.9851226806640625,
+      "learning_rate": 2.342660929898014e-05,
+      "loss": 0.1988,
+      "step": 446
+    },
+    {
+      "epoch": 0.08563679795966019,
+      "grad_norm": 4.653574466705322,
+      "learning_rate": 2.3142211022723868e-05,
+      "loss": 0.1911,
+      "step": 447
+    },
+    {
+      "epoch": 0.08582837916314935,
+      "grad_norm": 4.802988052368164,
+      "learning_rate": 2.2857788977276135e-05,
+      "loss": 0.1887,
+      "step": 448
+    },
+    {
+      "epoch": 0.08601996036663853,
+      "grad_norm": 3.352485179901123,
+      "learning_rate": 2.257339070101987e-05,
+      "loss": 0.1902,
+      "step": 449
+    },
+    {
+      "epoch": 0.0862115415701277,
+      "grad_norm": 2.825739860534668,
+      "learning_rate": 2.228906372836517e-05,
+      "loss": 0.1917,
+      "step": 450
     }
   ],
   "logging_steps": 1,
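trainer_state.json carries the running log_history shown in the hunk above: one record per logged optimizer step (logging_steps is 1 in this run) with epoch, grad_norm, learning_rate, loss and step, plus the checkpoint-level epoch and global_step updated to 450. A small sketch, assuming a local copy of the checkpoint directory, that reads the state and summarizes the newly added steps 376-450:

import json

# Local clone path is an assumption about where the checkpoint was downloaded.
with open("last-checkpoint/trainer_state.json") as f:
    state = json.load(f)

print(state["global_step"], state["epoch"])  # 450  0.0862115415701277

# Keep only training records for the steps added in this commit.
new_steps = [r for r in state["log_history"] if 376 <= r.get("step", 0) <= 450 and "loss" in r]
mean_loss = sum(r["loss"] for r in new_steps) / len(new_steps)
print(f"mean training loss over steps 376-450: {mean_loss:.4f}")

Training can usually be resumed from the same directory with the transformers Trainer, e.g. trainer.train(resume_from_checkpoint="last-checkpoint"), which restores the optimizer, scheduler, and trainer state uploaded in this commit.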