[2025-07-05 05:40:49,575][__main__][INFO] - cache_dir: /tmp/
dataset:
  name: kamel-usp/aes_enem_dataset
  split: JBCS2025
training_params:
  seed: 42
  num_train_epochs: 20
  logging_steps: 100
  metric_for_best_model: QWK
  bf16: true
bootstrap:
  enabled: true
  n_bootstrap: 10000
  bootstrap_seed: 42
  metrics:
  - QWK
  - Macro_F1
  - Weighted_F1
post_training_results:
  model_path: /workspace/jbcs2025/outputs/2025-03-24/20-42-59
experiments:
  model:
    name: microsoft/Phi-3.5-mini-instruct
    type: phi35_classification_lora
    num_labels: 6
    output_dir: ./results/llama31-8b-balanced/C5
    logging_dir: ./logs/llama31-8b-balanced/C5
    best_model_dir: ./results/llama31-8b-balanced/C5/best_model
    lora_r: 8
    lora_dropout: 0.05
    lora_alpha: 16
    lora_target_modules: all-linear
  tokenizer:
    name: microsoft/Phi-3.5-mini-instruct
  dataset:
    grade_index: 4
    use_full_context: false
  training_params:
    weight_decay: 0.01
    warmup_ratio: 0.1
    learning_rate: 5.0e-05
    train_batch_size: 8
    eval_batch_size: 4
    gradient_accumulation_steps: 2
    gradient_checkpointing: true
[2025-07-05 05:40:53,414][__main__][INFO] - GPU 0: NVIDIA H200 | TDP ≈ 700 W
[2025-07-05 05:40:53,414][__main__][INFO] - Starting the fine-tuning process.
[2025-07-05 05:40:56,931][transformers.tokenization_utils_base][INFO] - loading file tokenizer.model from cache at /tmp/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/tokenizer.model
[2025-07-05 05:40:56,931][transformers.tokenization_utils_base][INFO] - loading file tokenizer.json from cache at /tmp/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/tokenizer.json
[2025-07-05 05:40:56,931][transformers.tokenization_utils_base][INFO] - loading file added_tokens.json from cache at /tmp/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/added_tokens.json
[2025-07-05 05:40:56,931][transformers.tokenization_utils_base][INFO] - loading file special_tokens_map.json from cache at /tmp/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/special_tokens_map.json
[2025-07-05 05:40:56,931][transformers.tokenization_utils_base][INFO] - loading file tokenizer_config.json from cache at /tmp/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/tokenizer_config.json
[2025-07-05 05:40:56,931][transformers.tokenization_utils_base][INFO] - loading file chat_template.jinja from cache at None
[2025-07-05 05:40:56,985][transformers.tokenization_utils_base][INFO] - Special tokens have been added in the vocabulary, make sure the associated word embeddings are fine-tuned or trained.
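The config above fine-tunes microsoft/Phi-3.5-mini-instruct as a 6-label classifier with a LoRA adapter (lora_r=8, lora_alpha=16, lora_dropout=0.05, all-linear targets). A minimal sketch of that adapter config with peft, reconstructed from the logged values; the "all-linear" shorthand assumes peft >= 0.8, and this is not necessarily the project's actual code:

from peft import LoraConfig, TaskType

# Values copied from the experiments.model section of the config dump above.
lora_config = LoraConfig(
    task_type=TaskType.SEQ_CLS,   # sequence-classification head (num_labels=6)
    r=8,                          # lora_r
    lora_alpha=16,                # lora_alpha
    lora_dropout=0.05,            # lora_dropout
    target_modules="all-linear",  # adapt every linear projection in the model
)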
[2025-07-05 05:40:56,990][__main__][INFO] - Tokenizer function parameters - Padding: longest; Truncation: False; Use Full Context: False
[2025-07-05 05:40:58,113][transformers.configuration_utils][INFO] - loading configuration file config.json from cache at /tmp/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/config.json
[2025-07-05 05:40:58,113][transformers.configuration_utils][INFO] - Model config Phi3Config {
  "architectures": ["Phi3ForCausalLM"],
  "attention_bias": false,
  "attention_dropout": 0.0,
  "auto_map": {
    "AutoConfig": "configuration_phi3.Phi3Config",
    "AutoModelForCausalLM": "modeling_phi3.Phi3ForCausalLM"
  },
  "bos_token_id": 1,
  "embd_pdrop": 0.0,
  "eos_token_id": 32000,
  "hidden_act": "silu",
  "hidden_size": 3072,
  "id2label": {"0": "LABEL_0", "1": "LABEL_1", "2": "LABEL_2", "3": "LABEL_3", "4": "LABEL_4", "5": "LABEL_5"},
  "initializer_range": 0.02,
  "intermediate_size": 8192,
  "label2id": {"LABEL_0": 0, "LABEL_1": 1, "LABEL_2": 2, "LABEL_3": 3, "LABEL_4": 4, "LABEL_5": 5},
  "max_position_embeddings": 131072,
  "model_type": "phi3",
  "num_attention_heads": 32,
  "num_hidden_layers": 32,
  "num_key_value_heads": 32,
  "original_max_position_embeddings": 4096,
  "pad_token_id": 32000,
  "partial_rotary_factor": 1.0,
  "resid_pdrop": 0.0,
  "rms_norm_eps": 1e-05,
  "rope_scaling": {
    "long_factor": [
      1.0800000429153442, 1.1100000143051147, 1.1399999856948853, 1.340000033378601, 1.5899999141693115, 1.600000023841858,
      1.6200000047683716, 2.620000123977661, 3.2300000190734863, 3.2300000190734863, 4.789999961853027, 7.400000095367432,
      7.700000286102295, 9.09000015258789, 12.199999809265137, 17.670000076293945, 24.46000099182129, 28.57000160217285,
      30.420001983642578, 30.840002059936523, 32.590003967285156, 32.93000411987305, 42.320003509521484, 44.96000289916992,
      50.340003967285156, 50.45000457763672, 57.55000305175781, 57.93000411987305, 58.21000289916992, 60.1400032043457,
      62.61000442504883, 62.62000274658203, 62.71000289916992, 63.1400032043457, 63.1400032043457, 63.77000427246094,
      63.93000411987305, 63.96000289916992, 63.970001220703125, 64.02999877929688, 64.06999969482422, 64.08000183105469,
      64.12000274658203, 64.41000366210938, 64.4800033569336, 64.51000213623047, 64.52999877929688, 64.83999633789062
    ],
    "short_factor": [
      1.0, 1.0199999809265137, 1.0299999713897705, 1.0299999713897705, 1.0499999523162842, 1.0499999523162842,
      1.0499999523162842, 1.0499999523162842, 1.0499999523162842, 1.0699999332427979, 1.0999999046325684, 1.1099998950958252,
      1.1599998474121094, 1.1599998474121094, 1.1699998378753662, 1.2899998426437378, 1.339999794960022, 1.679999828338623,
      1.7899998426437378, 1.8199998140335083, 1.8499997854232788, 1.8799997568130493, 1.9099997282028198, 1.9399996995925903,
      1.9899996519088745, 2.0199997425079346, 2.0199997425079346, 2.0199997425079346, 2.0199997425079346, 2.0199997425079346,
      2.0199997425079346, 2.0299997329711914, 2.0299997329711914, 2.0299997329711914, 2.0299997329711914, 2.0299997329711914,
      2.0299997329711914, 2.0299997329711914, 2.0299997329711914, 2.0299997329711914, 2.0799996852874756, 2.0899996757507324,
      2.189999580383301, 2.2199995517730713, 2.5899994373321533, 2.729999542236328, 2.749999523162842, 2.8399994373321533
    ],
    "type": "longrope"
  },
  "rope_theta": 10000.0,
  "sliding_window": 262144,
  "tie_word_embeddings": false,
  "torch_dtype": "bfloat16",
  "transformers_version": "4.53.0",
  "use_cache": true,
  "vocab_size": 32064
}
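The tokenizer line above (before the config dump) pads each batch to its longest member and never truncates. A sketch of such a tokenize function under those settings; the function name and the use of the dataset's essay_text column as sole input are assumptions:

from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("microsoft/Phi-3.5-mini-instruct")

def tokenize_batch(batch):
    # Matches "Padding: longest; Truncation: False" above. With
    # use_full_context false, only the essay text itself would be encoded.
    return tokenizer(batch["essay_text"], padding="longest", truncation=False)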
[2025-07-05 05:40:58,114][transformers.modeling_utils][INFO] - loading weights file model.safetensors from cache at /tmp/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/model.safetensors.index.json
[2025-07-05 05:40:58,114][transformers.modeling_utils][INFO] - Will use torch_dtype=torch.bfloat16 as defined in model's config object
[2025-07-05 05:40:58,114][transformers.modeling_utils][INFO] - Instantiating Phi3ForSequenceClassification model under default dtype torch.bfloat16.
[2025-07-05 05:41:05,543][transformers.modeling_utils][INFO] - Some weights of the model checkpoint at microsoft/Phi-3.5-mini-instruct were not used when initializing Phi3ForSequenceClassification: ['lm_head.weight']
- This IS expected if you are initializing Phi3ForSequenceClassification from the checkpoint of a model trained on another task or with another architecture (e.g. initializing a BertForSequenceClassification model from a BertForPreTraining model).
- This IS NOT expected if you are initializing Phi3ForSequenceClassification from the checkpoint of a model that you expect to be exactly identical (initializing a BertForSequenceClassification model from a BertForSequenceClassification model).
[2025-07-05 05:41:05,543][transformers.modeling_utils][WARNING] - Some weights of Phi3ForSequenceClassification were not initialized from the model checkpoint at microsoft/Phi-3.5-mini-instruct and are newly initialized: ['score.weight']
You should probably TRAIN this model on a down-stream task to be able to use it for predictions and inference.
[2025-07-05 05:41:06,165][__main__][INFO] - Initialized new PEFT model for ce loss
[2025-07-05 05:41:06,167][__main__][INFO] - None
[2025-07-05 05:41:06,168][transformers.training_args][INFO] - PyTorch: setting up devices
[2025-07-05 05:41:06,204][__main__][INFO] - Total steps: 620. Number of warmup steps: 62
[2025-07-05 05:41:06,209][transformers.trainer][INFO] - You have loaded a model on multiple GPUs. `is_model_parallel` attribute will be force-set to `True` to avoid any unexpected behavior such as device placement mismatching.
[2025-07-05 05:41:06,231][transformers.trainer][INFO] - Using auto half precision backend
[2025-07-05 05:41:06,231][transformers.trainer][WARNING] - No label_names provided for model class `PeftModelForSequenceClassification`. Since `PeftModel` hides base models input arguments, if label_names is not given, label_names can't be set automatically within `Trainer`. Note that empty label_names list will be used instead.
[2025-07-05 05:41:06,233][transformers.trainer][INFO] - The following columns in the Evaluation set don't have a corresponding argument in `PeftModelForSequenceClassification.forward` and have been ignored: supporting_text, prompt, id, essay_text, grades, essay_year, reference, id_prompt. If supporting_text, prompt, id, essay_text, grades, essay_year, reference, id_prompt are not expected by `PeftModelForSequenceClassification.forward`, you can safely ignore this message.
[2025-07-05 05:41:06,243][transformers.trainer][INFO] - ***** Running Evaluation *****
[2025-07-05 05:41:06,243][transformers.trainer][INFO] - Num examples = 132
[2025-07-05 05:41:06,243][transformers.trainer][INFO] - Batch size = 4
[2025-07-05 05:41:18,652][transformers.trainer][INFO] - The following columns in the Training set don't have a corresponding argument in `PeftModelForSequenceClassification.forward` and have been ignored: supporting_text, prompt, id, essay_text, grades, essay_year, reference, id_prompt. If supporting_text, prompt, id, essay_text, grades, essay_year, reference, id_prompt are not expected by `PeftModelForSequenceClassification.forward`, you can safely ignore this message.
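The log shows a bf16 Phi3ForSequenceClassification being built (its score.weight head is freshly initialized, hence the TRAIN warning) and wrapped as a PEFT model, with warmup set to 10% of the 620 scheduler steps. A hedged sketch of that construction, reusing lora_config from the earlier snippet; this reconstructs the logged behavior rather than quoting the project's code:

import torch
from peft import get_peft_model
from transformers import AutoModelForSequenceClassification

model = AutoModelForSequenceClassification.from_pretrained(
    "microsoft/Phi-3.5-mini-instruct",
    num_labels=6,                # new score.weight head, as warned above
    torch_dtype=torch.bfloat16,  # matches bf16: true in the config
)
model = get_peft_model(model, lora_config)  # only adapter + head stay trainable

# Warmup bookkeeping as logged: warmup_ratio 0.1 of the total steps.
total_steps = 620
warmup_steps = int(0.1 * total_steps)  # = 62, matching the log line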
[2025-07-05 05:41:18,684][transformers.trainer][INFO] - ***** Running training *****
[2025-07-05 05:41:18,684][transformers.trainer][INFO] - Num examples = 500
[2025-07-05 05:41:18,684][transformers.trainer][INFO] - Num Epochs = 20
[2025-07-05 05:41:18,684][transformers.trainer][INFO] - Instantaneous batch size per device = 8
[2025-07-05 05:41:18,684][transformers.trainer][INFO] - Total train batch size (w. parallel, distributed & accumulation) = 16
[2025-07-05 05:41:18,684][transformers.trainer][INFO] - Gradient Accumulation steps = 2
[2025-07-05 05:41:18,684][transformers.trainer][INFO] - Total optimization steps = 640
[2025-07-05 05:41:18,685][transformers.trainer][INFO] - Number of trainable parameters = 12,601,344
[2025-07-05 05:41:18,759][transformers.models.phi3.modeling_phi3][WARNING] - `use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`.
[2025-07-05 05:42:10,056][transformers.trainer][INFO] - The following columns in the Evaluation set don't have a corresponding argument in `PeftModelForSequenceClassification.forward` and have been ignored: supporting_text, prompt, id, essay_text, grades, essay_year, reference, id_prompt. If supporting_text, prompt, id, essay_text, grades, essay_year, reference, id_prompt are not expected by `PeftModelForSequenceClassification.forward`, you can safely ignore this message.
[2025-07-05 05:42:10,059][transformers.trainer][INFO] - ***** Running Evaluation *****
[2025-07-05 05:42:10,059][transformers.trainer][INFO] - Num examples = 132
[2025-07-05 05:42:10,059][transformers.trainer][INFO] - Batch size = 4
[2025-07-05 05:42:22,003][transformers.trainer][INFO] - Saving model checkpoint to /workspace/jbcs2025/outputs/2025-07-05/05-40-49/results/llama31-8b-balanced/C5/checkpoint-32
[2025-07-05 05:42:22,319][transformers.configuration_utils][INFO] - loading configuration file config.json from cache at /workspace/.hf_home/hub/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/config.json
[2025-07-05 05:43:13,826][transformers.trainer][INFO] - The following columns in the Evaluation set don't have a corresponding argument in `PeftModelForSequenceClassification.forward` and have been ignored: supporting_text, prompt, id, essay_text, grades, essay_year, reference, id_prompt. If supporting_text, prompt, id, essay_text, grades, essay_year, reference, id_prompt are not expected by `PeftModelForSequenceClassification.forward`, you can safely ignore this message.
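The trainer's counters follow from the config: 500 examples, per-device batch 8, accumulation 2. A quick check of the arithmetic (single-GPU data path assumed); the earlier __main__ estimate of 620 steps presumably floors to 31 steps per epoch, while the Trainer rounds up to 32:

import math

examples, per_device_bs, grad_accum, epochs = 500, 8, 2, 20
effective_bs = per_device_bs * grad_accum             # 16, as logged
steps_per_epoch = math.ceil(examples / effective_bs)  # ceil(31.25) = 32
total_steps = steps_per_epoch * epochs                # 32 * 20 = 640
# Checkpoints land every 32 steps (checkpoint-32, -64, ...), i.e. once per epoch.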
[2025-07-05 05:43:13,830][transformers.trainer][INFO] - ***** Running Evaluation *****
[2025-07-05 05:43:13,830][transformers.trainer][INFO] - Num examples = 132
[2025-07-05 05:43:13,830][transformers.trainer][INFO] - Batch size = 4
[2025-07-05 05:43:25,779][transformers.trainer][INFO] - Saving model checkpoint to /workspace/jbcs2025/outputs/2025-07-05/05-40-49/results/llama31-8b-balanced/C5/checkpoint-64
[2025-07-05 05:43:26,090][transformers.configuration_utils][INFO] - loading configuration file config.json from cache at /workspace/.hf_home/hub/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/config.json
[2025-07-05 05:43:26,257][transformers.trainer][INFO] - Deleting older checkpoint [/workspace/jbcs2025/outputs/2025-07-05/05-40-49/results/llama31-8b-balanced/C5/checkpoint-32] due to args.save_total_limit
[2025-07-05 05:44:17,556][transformers.trainer][INFO] - The following columns in the Evaluation set don't have a corresponding argument in `PeftModelForSequenceClassification.forward` and have been ignored: supporting_text, prompt, id, essay_text, grades, essay_year, reference, id_prompt. If supporting_text, prompt, id, essay_text, grades, essay_year, reference, id_prompt are not expected by `PeftModelForSequenceClassification.forward`, you can safely ignore this message.
[2025-07-05 05:44:17,559][transformers.trainer][INFO] - ***** Running Evaluation *****
[2025-07-05 05:44:17,559][transformers.trainer][INFO] - Num examples = 132
[2025-07-05 05:44:17,559][transformers.trainer][INFO] - Batch size = 4
[2025-07-05 05:44:29,514][transformers.trainer][INFO] - Saving model checkpoint to /workspace/jbcs2025/outputs/2025-07-05/05-40-49/results/llama31-8b-balanced/C5/checkpoint-96
[2025-07-05 05:44:29,929][transformers.configuration_utils][INFO] - loading configuration file config.json from cache at /workspace/.hf_home/hub/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/config.json
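The save/evaluate/delete cadence above (one evaluation and one checkpoint per 32-step epoch, older checkpoints rotated out) is what per-epoch strategies plus a small save_total_limit produce. A sketch of TrainingArguments consistent with this log; values not present in the config dump (the two strategies, save_total_limit=1, load_best_model_at_end) are inferences from the log's behavior, not confirmed settings:

from transformers import TrainingArguments

args = TrainingArguments(
    output_dir="./results/llama31-8b-balanced/C5",
    num_train_epochs=20,
    per_device_train_batch_size=8,
    per_device_eval_batch_size=4,
    gradient_accumulation_steps=2,
    gradient_checkpointing=True,
    learning_rate=5e-5,
    weight_decay=0.01,
    warmup_ratio=0.1,
    bf16=True,
    logging_steps=100,
    seed=42,
    eval_strategy="epoch",        # inferred from the once-per-epoch evals
    save_strategy="epoch",        # inferred from checkpoint-32, -64, ...
    save_total_limit=1,           # inferred from the deletion messages
    load_best_model_at_end=True,  # inferred; pairs with the metric below
    metric_for_best_model="QWK",
)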
[2025-07-05 05:44:30,065][transformers.trainer][INFO] - Deleting older checkpoint [/workspace/jbcs2025/outputs/2025-07-05/05-40-49/results/llama31-8b-balanced/C5/checkpoint-64] due to args.save_total_limit
[2025-07-05 05:45:21,357][transformers.trainer][INFO] - The following columns in the Evaluation set don't have a corresponding argument in `PeftModelForSequenceClassification.forward` and have been ignored: supporting_text, prompt, id, essay_text, grades, essay_year, reference, id_prompt. If supporting_text, prompt, id, essay_text, grades, essay_year, reference, id_prompt are not expected by `PeftModelForSequenceClassification.forward`, you can safely ignore this message.
[2025-07-05 05:45:21,360][transformers.trainer][INFO] - ***** Running Evaluation *****
[2025-07-05 05:45:21,360][transformers.trainer][INFO] - Num examples = 132
[2025-07-05 05:45:21,360][transformers.trainer][INFO] - Batch size = 4
[2025-07-05 05:45:33,305][transformers.trainer][INFO] - Saving model checkpoint to /workspace/jbcs2025/outputs/2025-07-05/05-40-49/results/llama31-8b-balanced/C5/checkpoint-128
[2025-07-05 05:45:33,633][transformers.configuration_utils][INFO] - loading configuration file config.json from cache at /workspace/.hf_home/hub/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/config.json
[2025-07-05 05:45:33,761][transformers.trainer][INFO] - Deleting older checkpoint [/workspace/jbcs2025/outputs/2025-07-05/05-40-49/results/llama31-8b-balanced/C5/checkpoint-96] due to args.save_total_limit
[2025-07-05 05:46:25,088][transformers.trainer][INFO] - The following columns in the Evaluation set don't have a corresponding argument in `PeftModelForSequenceClassification.forward` and have been ignored: supporting_text, prompt, id, essay_text, grades, essay_year, reference, id_prompt. If supporting_text, prompt, id, essay_text, grades, essay_year, reference, id_prompt are not expected by `PeftModelForSequenceClassification.forward`, you can safely ignore this message.
[2025-07-05 05:46:25,091][transformers.trainer][INFO] - ***** Running Evaluation *****
[2025-07-05 05:46:25,091][transformers.trainer][INFO] - Num examples = 132
[2025-07-05 05:46:25,091][transformers.trainer][INFO] - Batch size = 4
[2025-07-05 05:46:37,037][transformers.trainer][INFO] - Saving model checkpoint to /workspace/jbcs2025/outputs/2025-07-05/05-40-49/results/llama31-8b-balanced/C5/checkpoint-160
[2025-07-05 05:46:37,351][transformers.configuration_utils][INFO] - loading configuration file config.json from cache at /workspace/.hf_home/hub/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/config.json
[2025-07-05 05:47:28,729][transformers.trainer][INFO] - The following columns in the Evaluation set don't have a corresponding argument in `PeftModelForSequenceClassification.forward` and have been ignored: supporting_text, prompt, id, essay_text, grades, essay_year, reference, id_prompt. If supporting_text, prompt, id, essay_text, grades, essay_year, reference, id_prompt are not expected by `PeftModelForSequenceClassification.forward`, you can safely ignore this message.
[2025-07-05 05:47:28,732][transformers.trainer][INFO] - ***** Running Evaluation *****
[2025-07-05 05:47:28,733][transformers.trainer][INFO] - Num examples = 132
[2025-07-05 05:47:28,733][transformers.trainer][INFO] - Batch size = 4
[2025-07-05 05:47:40,682][transformers.trainer][INFO] - Saving model checkpoint to /workspace/jbcs2025/outputs/2025-07-05/05-40-49/results/llama31-8b-balanced/C5/checkpoint-192
[2025-07-05 05:47:40,975][transformers.configuration_utils][INFO] - loading configuration file config.json from cache at /workspace/.hf_home/hub/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/config.json
[2025-07-05 05:47:41,057][transformers.trainer][INFO] - Deleting older checkpoint [/workspace/jbcs2025/outputs/2025-07-05/05-40-49/results/llama31-8b-balanced/C5/checkpoint-160] due to args.save_total_limit
[2025-07-05 05:48:32,329][transformers.trainer][INFO] - The following columns in the Evaluation set don't have a corresponding argument in `PeftModelForSequenceClassification.forward` and have been ignored: supporting_text, prompt, id, essay_text, grades, essay_year, reference, id_prompt. If supporting_text, prompt, id, essay_text, grades, essay_year, reference, id_prompt are not expected by `PeftModelForSequenceClassification.forward`, you can safely ignore this message.
[2025-07-05 05:48:32,332][transformers.trainer][INFO] - ***** Running Evaluation *****
[2025-07-05 05:48:32,332][transformers.trainer][INFO] - Num examples = 132
[2025-07-05 05:48:32,332][transformers.trainer][INFO] - Batch size = 4
[2025-07-05 05:48:44,272][transformers.trainer][INFO] - Saving model checkpoint to /workspace/jbcs2025/outputs/2025-07-05/05-40-49/results/llama31-8b-balanced/C5/checkpoint-224
[2025-07-05 05:48:44,590][transformers.configuration_utils][INFO] - loading configuration file config.json from cache at /workspace/.hf_home/hub/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/config.json
[2025-07-05 05:48:44,657][transformers.trainer][INFO] - Deleting older checkpoint [/workspace/jbcs2025/outputs/2025-07-05/05-40-49/results/llama31-8b-balanced/C5/checkpoint-192] due to args.save_total_limit
[2025-07-05 05:49:35,924][transformers.trainer][INFO] - The following columns in the Evaluation set don't have a corresponding argument in `PeftModelForSequenceClassification.forward` and have been ignored: supporting_text, prompt, id, essay_text, grades, essay_year, reference, id_prompt. If supporting_text, prompt, id, essay_text, grades, essay_year, reference, id_prompt are not expected by `PeftModelForSequenceClassification.forward`, you can safely ignore this message.
[2025-07-05 05:49:35,927][transformers.trainer][INFO] - ***** Running Evaluation *****
[2025-07-05 05:49:35,927][transformers.trainer][INFO] - Num examples = 132
[2025-07-05 05:49:35,927][transformers.trainer][INFO] - Batch size = 4
[2025-07-05 05:49:47,870][transformers.trainer][INFO] - Saving model checkpoint to /workspace/jbcs2025/outputs/2025-07-05/05-40-49/results/llama31-8b-balanced/C5/checkpoint-256
[2025-07-05 05:49:48,162][transformers.configuration_utils][INFO] - loading configuration file config.json from cache at /workspace/.hf_home/hub/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/config.json
[2025-07-05 05:49:48,256][transformers.trainer][INFO] - Deleting older checkpoint [/workspace/jbcs2025/outputs/2025-07-05/05-40-49/results/llama31-8b-balanced/C5/checkpoint-224] due to args.save_total_limit
[2025-07-05 05:50:39,537][transformers.trainer][INFO] - The following columns in the Evaluation set don't have a corresponding argument in `PeftModelForSequenceClassification.forward` and have been ignored: supporting_text, prompt, id, essay_text, grades, essay_year, reference, id_prompt. If supporting_text, prompt, id, essay_text, grades, essay_year, reference, id_prompt are not expected by `PeftModelForSequenceClassification.forward`, you can safely ignore this message.
[2025-07-05 05:50:39,540][transformers.trainer][INFO] - ***** Running Evaluation *****
[2025-07-05 05:50:39,540][transformers.trainer][INFO] - Num examples = 132
[2025-07-05 05:50:39,540][transformers.trainer][INFO] - Batch size = 4
[2025-07-05 05:50:51,477][transformers.trainer][INFO] - Saving model checkpoint to /workspace/jbcs2025/outputs/2025-07-05/05-40-49/results/llama31-8b-balanced/C5/checkpoint-288
[2025-07-05 05:50:51,776][transformers.configuration_utils][INFO] - loading configuration file config.json from cache at /workspace/.hf_home/hub/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/config.json
[2025-07-05 05:50:51,861][transformers.trainer][INFO] - Deleting older checkpoint [/workspace/jbcs2025/outputs/2025-07-05/05-40-49/results/llama31-8b-balanced/C5/checkpoint-128] due to args.save_total_limit
[2025-07-05 05:50:51,865][transformers.trainer][INFO] - Deleting older checkpoint [/workspace/jbcs2025/outputs/2025-07-05/05-40-49/results/llama31-8b-balanced/C5/checkpoint-256] due to args.save_total_limit
[2025-07-05 05:51:43,152][transformers.trainer][INFO] - The following columns in the Evaluation set don't have a corresponding argument in `PeftModelForSequenceClassification.forward` and have been ignored: supporting_text, prompt, id, essay_text, grades, essay_year, reference, id_prompt. If supporting_text, prompt, id, essay_text, grades, essay_year, reference, id_prompt are not expected by `PeftModelForSequenceClassification.forward`, you can safely ignore this message.
[2025-07-05 05:51:43,156][transformers.trainer][INFO] - ***** Running Evaluation *****
[2025-07-05 05:51:43,156][transformers.trainer][INFO] - Num examples = 132
[2025-07-05 05:51:43,156][transformers.trainer][INFO] - Batch size = 4
[2025-07-05 05:51:55,102][transformers.trainer][INFO] - Saving model checkpoint to /workspace/jbcs2025/outputs/2025-07-05/05-40-49/results/llama31-8b-balanced/C5/checkpoint-320
[2025-07-05 05:51:55,392][transformers.configuration_utils][INFO] - loading configuration file config.json from cache at /workspace/.hf_home/hub/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/config.json
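Note the double deletion above: checkpoint-128 survived several rotations before being removed together with checkpoint-256, which is consistent with the Trainer protecting the current best checkpoint (by eval QWK) until a later one surpassed it. QWK here is quadratic-weighted Cohen's kappa, with the bootstrap CI settings from the config (n_bootstrap: 10000, bootstrap_seed: 42). A sketch of that metric; the implementation below is an assumption, not taken from the project:

import numpy as np
from sklearn.metrics import cohen_kappa_score

def qwk(y_true, y_pred):
    # Quadratic-weighted kappa: distant grade disagreements cost more.
    return cohen_kappa_score(y_true, y_pred, weights="quadratic")

def bootstrap_ci(y_true, y_pred, n_bootstrap=10_000, seed=42):
    # 95% percentile CI over resamples of the eval set (132 essays here).
    rng = np.random.default_rng(seed)
    y_true, y_pred = np.asarray(y_true), np.asarray(y_pred)
    scores = []
    for _ in range(n_bootstrap):
        idx = rng.integers(0, len(y_true), len(y_true))  # resample w/ replacement
        scores.append(qwk(y_true[idx], y_pred[idx]))
    return np.percentile(scores, [2.5, 97.5])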
[2025-07-05 05:51:55,459][transformers.trainer][INFO] - Deleting older checkpoint [/workspace/jbcs2025/outputs/2025-07-05/05-40-49/results/llama31-8b-balanced/C5/checkpoint-288] due to args.save_total_limit
[2025-07-05 05:52:46,746][transformers.trainer][INFO] - The following columns in the Evaluation set don't have a corresponding argument in `PeftModelForSequenceClassification.forward` and have been ignored: supporting_text, prompt, id, essay_text, grades, essay_year, reference, id_prompt. If supporting_text, prompt, id, essay_text, grades, essay_year, reference, id_prompt are not expected by `PeftModelForSequenceClassification.forward`, you can safely ignore this message.
[2025-07-05 05:52:46,749][transformers.trainer][INFO] - ***** Running Evaluation *****
[2025-07-05 05:52:46,749][transformers.trainer][INFO] - Num examples = 132
[2025-07-05 05:52:46,749][transformers.trainer][INFO] - Batch size = 4
[2025-07-05 05:52:58,693][transformers.trainer][INFO] - Saving model checkpoint to /workspace/jbcs2025/outputs/2025-07-05/05-40-49/results/llama31-8b-balanced/C5/checkpoint-352
[2025-07-05 05:52:59,021][transformers.configuration_utils][INFO] - loading configuration file config.json from cache at /workspace/.hf_home/hub/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/config.json
[2025-07-05 05:53:50,431][transformers.trainer][INFO] - The following columns in the Evaluation set don't have a corresponding argument in `PeftModelForSequenceClassification.forward` and have been ignored: supporting_text, prompt, id, essay_text, grades, essay_year, reference, id_prompt. If supporting_text, prompt, id, essay_text, grades, essay_year, reference, id_prompt are not expected by `PeftModelForSequenceClassification.forward`, you can safely ignore this message.
[2025-07-05 05:53:50,434][transformers.trainer][INFO] - ***** Running Evaluation *****
[2025-07-05 05:53:50,434][transformers.trainer][INFO] - Num examples = 132
[2025-07-05 05:53:50,434][transformers.trainer][INFO] - Batch size = 4
[2025-07-05 05:54:02,376][transformers.trainer][INFO] - Saving model checkpoint to /workspace/jbcs2025/outputs/2025-07-05/05-40-49/results/llama31-8b-balanced/C5/checkpoint-384
[2025-07-05 05:54:02,733][transformers.configuration_utils][INFO] - loading configuration file config.json from cache at /workspace/.hf_home/hub/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/config.json
[2025-07-05 05:54:02,856][transformers.trainer][INFO] - Deleting older checkpoint [/workspace/jbcs2025/outputs/2025-07-05/05-40-49/results/llama31-8b-balanced/C5/checkpoint-352] due to args.save_total_limit
[2025-07-05 05:54:54,116][transformers.trainer][INFO] - (unused-columns notice repeated; identical to the 05:53:50 entry above)
[2025-07-05 05:54:54,119][transformers.trainer][INFO] - ***** Running Evaluation *****
[2025-07-05 05:54:54,119][transformers.trainer][INFO] - Num examples = 132
[2025-07-05 05:54:54,119][transformers.trainer][INFO] - Batch size = 4
[2025-07-05 05:55:06,059][transformers.trainer][INFO] - Saving model checkpoint to /workspace/jbcs2025/outputs/2025-07-05/05-40-49/results/llama31-8b-balanced/C5/checkpoint-416
[2025-07-05 05:55:06,371][transformers.configuration_utils][INFO] - loading configuration file config.json from cache (Phi3Config dump omitted, identical to above)
[2025-07-05 05:55:06,455][transformers.trainer][INFO] - Deleting older checkpoint [/workspace/jbcs2025/outputs/2025-07-05/05-40-49/results/llama31-8b-balanced/C5/checkpoint-384] due to args.save_total_limit
[2025-07-05 05:55:57,716][transformers.trainer][INFO] - (unused-columns notice repeated)
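The repeated "columns ... have been ignored" notice comes from the Trainer's default `remove_unused_columns=True`: any dataset column whose name does not match a `forward()` argument (`input_ids`, `attention_mask`, `labels`, ...) is dropped before batching, so raw fields like `essay_text` and `supporting_text` never reach the model. A sketch of how one could keep them, assuming the surrounding Trainer setup:

```python
from transformers import TrainingArguments

args = TrainingArguments(
    output_dir="./results",
    remove_unused_columns=False,  # keep supporting_text, essay_text, ... in batches
)
# Caveat: with this disabled, the data collator must tolerate (or strip) the
# extra string columns itself, otherwise tensor conversion will fail.
```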
[2025-07-05 05:55:57,719][transformers.trainer][INFO] - ***** Running Evaluation *****
[2025-07-05 05:55:57,719][transformers.trainer][INFO] - Num examples = 132
[2025-07-05 05:55:57,719][transformers.trainer][INFO] - Batch size = 4
[2025-07-05 05:56:09,658][transformers.trainer][INFO] - Saving model checkpoint to /workspace/jbcs2025/outputs/2025-07-05/05-40-49/results/llama31-8b-balanced/C5/checkpoint-448
[2025-07-05 05:56:09,956][transformers.configuration_utils][INFO] - loading configuration file config.json from cache (Phi3Config dump omitted, identical to above)
[2025-07-05 05:56:10,054][transformers.trainer][INFO] - Deleting older checkpoint [/workspace/jbcs2025/outputs/2025-07-05/05-40-49/results/llama31-8b-balanced/C5/checkpoint-416] due to args.save_total_limit
[2025-07-05 05:57:01,337][transformers.trainer][INFO] - (unused-columns notice repeated)
[2025-07-05 05:57:01,340][transformers.trainer][INFO] - ***** Running Evaluation *****
[2025-07-05 05:57:01,341][transformers.trainer][INFO] - Num examples = 132
[2025-07-05 05:57:01,341][transformers.trainer][INFO] - Batch size = 4
[2025-07-05 05:57:13,282][transformers.trainer][INFO] - Saving model checkpoint to /workspace/jbcs2025/outputs/2025-07-05/05-40-49/results/llama31-8b-balanced/C5/checkpoint-480
[2025-07-05 05:57:13,571][transformers.configuration_utils][INFO] - loading configuration file config.json from cache (Phi3Config dump omitted, identical to above)
[2025-07-05 05:57:13,658][transformers.trainer][INFO] - Deleting older checkpoint [/workspace/jbcs2025/outputs/2025-07-05/05-40-49/results/llama31-8b-balanced/C5/checkpoint-320] due to args.save_total_limit
[2025-07-05 05:57:13,661][transformers.trainer][INFO] - Deleting older checkpoint [/workspace/jbcs2025/outputs/2025-07-05/05-40-49/results/llama31-8b-balanced/C5/checkpoint-448] due to args.save_total_limit
[2025-07-05 05:58:04,946][transformers.trainer][INFO] - (unused-columns notice repeated)
[2025-07-05 05:58:04,949][transformers.trainer][INFO] - ***** Running Evaluation *****
[2025-07-05 05:58:04,949][transformers.trainer][INFO] - Num examples = 132
[2025-07-05 05:58:04,949][transformers.trainer][INFO] - Batch size = 4
[2025-07-05 05:58:16,893][transformers.trainer][INFO] - Saving model checkpoint to /workspace/jbcs2025/outputs/2025-07-05/05-40-49/results/llama31-8b-balanced/C5/checkpoint-512
[2025-07-05 05:58:17,190][transformers.configuration_utils][INFO] - loading configuration file config.json from cache (Phi3Config dump omitted, identical to above)
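The deletion pattern here is worth a note: checkpoint-320 survived several newer saves and is removed only after checkpoint-480 posts a better validation score. This matches the Trainer's behavior of protecting the current best checkpoint from `save_total_limit` rotation when `load_best_model_at_end` is set. A simplified sketch of that rule (an assumed re-implementation, not the actual transformers code):

```python
import os
import re
import shutil

def rotate_checkpoints(output_dir: str, best_step: int, save_total_limit: int = 1) -> None:
    """Keep the newest `save_total_limit` checkpoints plus the current best one."""
    steps = sorted(
        int(m.group(1))
        for name in os.listdir(output_dir)
        if (m := re.fullmatch(r"checkpoint-(\d+)", name))
    )
    keep = set(steps[-save_total_limit:])  # newest N checkpoints
    keep.add(best_step)                    # the best checkpoint is never rotated out
    for step in steps:
        if step not in keep:
            shutil.rmtree(os.path.join(output_dir, f"checkpoint-{step}"))
```

Applied to the state above: with checkpoints {320, 448, 480} and 480 newly best, both 320 and 448 become deletable, which is exactly what the two deletion lines show.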
[2025-07-05 05:59:08,526][transformers.trainer][INFO] - (unused-columns notice repeated)
[2025-07-05 05:59:08,529][transformers.trainer][INFO] - ***** Running Evaluation *****
[2025-07-05 05:59:08,529][transformers.trainer][INFO] - Num examples = 132
[2025-07-05 05:59:08,529][transformers.trainer][INFO] - Batch size = 4
[2025-07-05 05:59:20,468][transformers.trainer][INFO] - Saving model checkpoint to /workspace/jbcs2025/outputs/2025-07-05/05-40-49/results/llama31-8b-balanced/C5/checkpoint-544
[2025-07-05 05:59:20,753][transformers.configuration_utils][INFO] - loading configuration file config.json from cache (Phi3Config dump omitted, identical to above)
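The `PeftModelForSequenceClassification` named in the notices is what results from wrapping the base model in a LoRA adapter for 6-way classification. A minimal sketch of that setup; the hyperparameter values shown are illustrative assumptions, not read from the run itself:

```python
from transformers import AutoModelForSequenceClassification
from peft import LoraConfig, get_peft_model

base = AutoModelForSequenceClassification.from_pretrained(
    "microsoft/Phi-3.5-mini-instruct",
    num_labels=6,  # the six grade levels being predicted
)
lora = LoraConfig(
    task_type="SEQ_CLS",        # yields a PeftModelForSequenceClassification
    r=8,                        # assumed adapter rank
    lora_alpha=16,              # assumed scaling
    lora_dropout=0.05,          # assumed dropout
    target_modules="all-linear",
)
model = get_peft_model(base, lora)  # only the adapters and score head stay trainable
```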
[2025-07-05 05:59:20,939][transformers.trainer][INFO] - Deleting older checkpoint [/workspace/jbcs2025/outputs/2025-07-05/05-40-49/results/llama31-8b-balanced/C5/checkpoint-512] due to args.save_total_limit
[2025-07-05 06:00:12,249][transformers.trainer][INFO] - (unused-columns notice repeated)
[2025-07-05 06:00:12,253][transformers.trainer][INFO] - ***** Running Evaluation *****
[2025-07-05 06:00:12,253][transformers.trainer][INFO] - Num examples = 132
[2025-07-05 06:00:12,253][transformers.trainer][INFO] - Batch size = 4
[2025-07-05 06:00:24,198][transformers.trainer][INFO] - Saving model checkpoint to /workspace/jbcs2025/outputs/2025-07-05/05-40-49/results/llama31-8b-balanced/C5/checkpoint-576
[2025-07-05 06:00:24,492][transformers.configuration_utils][INFO] - loading configuration file config.json from cache (Phi3Config dump omitted, identical to above)
[2025-07-05 06:00:24,559][transformers.trainer][INFO] - Deleting older checkpoint [/workspace/jbcs2025/outputs/2025-07-05/05-40-49/results/llama31-8b-balanced/C5/checkpoint-544] due to args.save_total_limit
[2025-07-05 06:01:15,848][transformers.trainer][INFO] - (unused-columns notice repeated)
[2025-07-05 06:01:15,851][transformers.trainer][INFO] - ***** Running Evaluation *****
[2025-07-05 06:01:15,851][transformers.trainer][INFO] - Num examples = 132
[2025-07-05 06:01:15,851][transformers.trainer][INFO] - Batch size = 4
[2025-07-05 06:01:27,798][transformers.trainer][INFO] - Saving model checkpoint to /workspace/jbcs2025/outputs/2025-07-05/05-40-49/results/llama31-8b-balanced/C5/checkpoint-608
[2025-07-05 06:01:28,132][transformers.configuration_utils][INFO] - loading configuration file config.json from cache (Phi3Config dump omitted, identical to above)
[2025-07-05 06:01:28,259][transformers.trainer][INFO] - Deleting older checkpoint [/workspace/jbcs2025/outputs/2025-07-05/05-40-49/results/llama31-8b-balanced/C5/checkpoint-576] due to args.save_total_limit
[2025-07-05 06:02:19,582][transformers.trainer][INFO] - (unused-columns notice repeated)
[2025-07-05 06:02:19,585][transformers.trainer][INFO] - ***** Running Evaluation *****
[2025-07-05 06:02:19,585][transformers.trainer][INFO] - Num examples = 132
[2025-07-05 06:02:19,585][transformers.trainer][INFO] - Batch size = 4
[2025-07-05 06:02:31,537][transformers.trainer][INFO] - Saving model checkpoint to /workspace/jbcs2025/outputs/2025-07-05/05-40-49/results/llama31-8b-balanced/C5/checkpoint-640
[2025-07-05 06:02:31,834][transformers.configuration_utils][INFO] - loading configuration file config.json from cache (Phi3Config dump omitted, identical to above)
[2025-07-05 06:02:31,957][transformers.trainer][INFO] - Deleting older checkpoint [/workspace/jbcs2025/outputs/2025-07-05/05-40-49/results/llama31-8b-balanced/C5/checkpoint-608] due to args.save_total_limit
[2025-07-05 06:02:31,960][transformers.trainer][INFO] - Training completed. Do not forget to share your model on huggingface.co/models =)
[2025-07-05 06:02:31,960][transformers.trainer][INFO] - Loading best model from /workspace/jbcs2025/outputs/2025-07-05/05-40-49/results/llama31-8b-balanced/C5/checkpoint-480 (score: 0.3469800235017627).
[2025-07-05 06:02:32,060][transformers.trainer][INFO] - Deleting older checkpoint [/workspace/jbcs2025/outputs/2025-07-05/05-40-49/results/llama31-8b-balanced/C5/checkpoint-640] due to args.save_total_limit
[2025-07-05 06:02:32,065][transformers.trainer][INFO] - (unused-columns notice repeated)
[2025-07-05 06:02:32,067][transformers.trainer][INFO] - ***** Running Evaluation *****
[2025-07-05 06:02:32,067][transformers.trainer][INFO] - Num examples = 132
[2025-07-05 06:02:32,067][transformers.trainer][INFO] - Batch size = 4
[2025-07-05 06:02:44,015][__main__][INFO] - Training completed successfully.
[2025-07-05 06:02:44,015][__main__][INFO] - Running on Test
[2025-07-05 06:02:44,015][transformers.trainer][INFO] - (unused-columns notice repeated)
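The best checkpoint was selected by validation QWK (score 0.3469800235017627 at checkpoint-480). A minimal sketch of a `compute_metrics` producing the "QWK" key that `metric_for_best_model` refers to; sklearn's quadratically weighted Cohen's kappa is the standard way to compute it, though the run's actual implementation is not shown in the log:

```python
import numpy as np
from sklearn.metrics import cohen_kappa_score, f1_score

def compute_metrics(eval_pred):
    """Return the metric names the Trainer logs with an eval_ prefix."""
    logits, labels = eval_pred
    preds = np.argmax(logits, axis=-1)
    return {
        "QWK": cohen_kappa_score(labels, preds, weights="quadratic"),
        "Macro_F1": f1_score(labels, preds, average="macro"),
        "Weighted_F1": f1_score(labels, preds, average="weighted"),
    }
```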
[2025-07-05 06:02:44,018][transformers.trainer][INFO] - ***** Running Evaluation *****
[2025-07-05 06:02:44,018][transformers.trainer][INFO] - Num examples = 138
[2025-07-05 06:02:44,018][transformers.trainer][INFO] - Batch size = 4
[2025-07-05 06:02:56,805][__main__][INFO] - Test metrics: {'eval_loss': 1.707552433013916, 'eval_model_preparation_time': 0.0072, 'eval_accuracy': 0.2318840579710145, 'eval_RMSE': 74.28889943800584, 'eval_QWK': 0.3294679678249153, 'eval_HDIV': 0.21739130434782605, 'eval_Macro_F1': 0.18099168729276607, 'eval_Micro_F1': 0.2318840579710145, 'eval_Weighted_F1': 0.22564402274026052, 'eval_TP_0': 7, 'eval_TN_0': 85, 'eval_FP_0': 31, 'eval_FN_0': 15, 'eval_TP_1': 6, 'eval_TN_1': 98, 'eval_FP_1': 8, 'eval_FN_1': 26, 'eval_TP_2': 1, 'eval_TN_2': 101, 'eval_FP_2': 13, 'eval_FN_2': 23, 'eval_TP_3': 3, 'eval_TN_3': 92, 'eval_FP_3': 21, 'eval_FN_3': 22, 'eval_TP_4': 15, 'eval_TN_4': 81, 'eval_FP_4': 25, 'eval_FN_4': 17, 'eval_TP_5': 0, 'eval_TN_5': 127, 'eval_FP_5': 8, 'eval_FN_5': 3, 'eval_runtime': 12.7789, 'eval_samples_per_second': 10.799, 'eval_steps_per_second': 2.739, 'epoch': 20.0}
[2025-07-05 06:02:56,805][transformers.trainer][INFO] - Saving model checkpoint to ./results/llama31-8b-balanced/C5/best_model
[2025-07-05 06:02:57,134][transformers.configuration_utils][INFO] - loading configuration file config.json from cache (Phi3Config dump omitted, identical to above)
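The per-class TP/FP/FN counts in the test metrics are enough to reproduce the aggregate F1 scores, which makes a handy sanity check on the logged numbers. A plain-Python sketch using the exact counts from the dictionary above:

```python
# (TP, FP, FN) per class, copied from the logged test metrics (n = 138).
counts = {
    0: (7, 31, 15), 1: (6, 8, 26), 2: (1, 13, 23),
    3: (3, 21, 22), 4: (15, 25, 17), 5: (0, 8, 3),
}

# Per-class F1 = 2*TP / (2*TP + FP + FN); macro-F1 is the unweighted mean.
f1 = {c: 2 * tp / (2 * tp + fp + fn) for c, (tp, fp, fn) in counts.items()}
macro_f1 = sum(f1.values()) / len(f1)
micro_f1 = sum(tp for tp, _, _ in counts.values()) / 138  # equals accuracy here

print(f"Macro_F1 = {macro_f1:.6f}")  # 0.180992, matching eval_Macro_F1
print(f"Micro_F1 = {micro_f1:.6f}")  # 0.231884, matching eval_Micro_F1 and eval_accuracy
```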
[2025-07-05 06:02:57,256][transformers.tokenization_utils_base][INFO] - chat template saved in ./results/llama31-8b-balanced/C5/best_model/chat_template.jinja
[2025-07-05 06:02:57,256][transformers.tokenization_utils_base][INFO] - tokenizer config file saved in ./results/llama31-8b-balanced/C5/best_model/tokenizer_config.json
[2025-07-05 06:02:57,256][transformers.tokenization_utils_base][INFO] - Special tokens file saved in ./results/llama31-8b-balanced/C5/best_model/special_tokens_map.json
[2025-07-05 06:02:57,278][__main__][INFO] - Model and tokenizer saved to ./results/llama31-8b-balanced/C5/best_model
[2025-07-05 06:02:57,283][__main__][INFO] - Fine Tuning Finished.
[2025-07-05 06:02:57,792][__main__][INFO] - Total emissions: 0.0164 kg CO2eq
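The closing "Total emissions" line is the kind of figure produced by an energy tracker such as codecarbon; the run's actual tracker is not shown in the log, so the following is an assumed sketch of how such a number is typically obtained:

```python
from codecarbon import EmissionsTracker

def run_training() -> None:
    """Hypothetical placeholder for the fine-tuning loop logged above."""
    ...

tracker = EmissionsTracker()  # samples GPU/CPU power draw while running
tracker.start()
try:
    run_training()
finally:
    emissions_kg = tracker.stop()  # estimated kg CO2eq for the tracked span

print(f"Total emissions: {emissions_kg:.4f} kg CO2eq")
```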