{
  "best_metric": 0.7148663237442052,
  "best_model_checkpoint": "/kaggle/working/sweep_13/checkpoint-318",
  "epoch": 2.0,
  "eval_steps": 500,
  "global_step": 318,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.31446540880503143,
      "grad_norm": 14.776991844177246,
      "learning_rate": 1.9062500000000003e-05,
      "loss": 0.1982,
      "step": 50
    },
    {
      "epoch": 0.6289308176100629,
      "grad_norm": 5.086421012878418,
      "learning_rate": 1.6830357142857144e-05,
      "loss": 0.1704,
      "step": 100
    },
    {
      "epoch": 0.9433962264150944,
      "grad_norm": 7.991980075836182,
      "learning_rate": 1.4598214285714288e-05,
      "loss": 0.1255,
      "step": 150
    },
    {
      "epoch": 1.0,
      "eval_accuracy": 0.7282850779510023,
      "eval_f1_macro": 0.6393146860514117,
      "eval_loss": 0.09497806429862976,
      "eval_precision_macro": 0.6395879038155461,
      "eval_recall_macro": 0.7633928571428572,
      "eval_runtime": 2.6307,
      "eval_samples_per_second": 170.678,
      "eval_steps_per_second": 5.702,
      "step": 159
    },
    {
      "epoch": 1.2578616352201257,
      "grad_norm": 13.86682415008545,
      "learning_rate": 1.236607142857143e-05,
      "loss": 0.1204,
      "step": 200
    },
    {
      "epoch": 1.5723270440251573,
      "grad_norm": 36.624263763427734,
      "learning_rate": 1.0133928571428573e-05,
      "loss": 0.0991,
      "step": 250
    },
    {
      "epoch": 1.8867924528301887,
      "grad_norm": 14.556501388549805,
      "learning_rate": 7.901785714285715e-06,
      "loss": 0.092,
      "step": 300
    },
    {
      "epoch": 2.0,
      "eval_accuracy": 0.821826280623608,
      "eval_f1_macro": 0.7148663237442052,
      "eval_loss": 0.11809030920267105,
      "eval_precision_macro": 0.6885626173236792,
      "eval_recall_macro": 0.7853693181818182,
      "eval_runtime": 2.6323,
      "eval_samples_per_second": 170.573,
      "eval_steps_per_second": 5.698,
      "step": 318
    }
  ],
  "logging_steps": 50,
  "max_steps": 477,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 500,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 2,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 0
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 935341342670100.0,
  "train_batch_size": 16,
  "trial_name": null,
  "trial_params": null
}