diff --git "a/20250121165312/rank0.log" "b/20250121165312/rank0.log" new file mode 100644--- /dev/null +++ "b/20250121165312/rank0.log" @@ -0,0 +1,900 @@ +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 16:53:16][INFO] Namespace(llm='/mnt/hwfile/opendatalab/panzhuoshi/huggingface/hub/models--Qwen--Qwen2.5-72B-Instruct/snapshots/d3d951150c1e5848237cd6a7ad11df4836aee842', tokenizer=None, chat_template='qwen2', use_lora=False, lora_targets=None, lora_r=64, lora_alpha=16, lora_dropout=0.1, lora_bias='none', dtype='auto', selective_recompute=1.0, shard_strategy='full', cpu_offload=False, sp_size=4, datasets=['/mnt/petrelfs/caimengzhang/data/20b_data//base50v2-new-zh-en30w-combinev9-mls-chatbeta2'], dset_file_types=dict_keys(['.jsonl', '.json']), dset_sources=['local'], dset_formats=['openai'], dset_sample_ratios=[1.0], dset_cache_dir='/mnt/petrelfs/caimengzhang/cached_data/qwen25_72b_inst_base50v2-new-zh-en30w-combinev9-mls-chatbeta2', dset_pack_level='soft', global_pack=True, max_length=32768, num_workers=1, file_pattern=None, group_by_length=True, mirco_batch_size=1, global_batch_size=64, lr=2e-05, lr_min=6e-06, wd=0.01, max_grad_norm=1, epochs=1, warmup_ratio=0.025, config=None, work_dir='checkpoints/qwen25_72b_inst_base50v2-new-zh-en30w-combinev9-mls-chatbeta2/20250121165312', feishu_webhook=None, gc_interval=100, checkpoint_interval=200000.0, checkpoint_max_keep=1, checkpoint_drop_optimizer=True, log_interval=1, resume=False, seed=0, debug=False) +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 16:53:16][INFO] +------------------------------------------------------------ +Runtime environment: + sys.platform: linux + Python: 3.10.16 (main, Dec 11 2024, 16:24:50) [GCC 11.2.0] + CUDA available: True + MUSA available: False + numpy_random_seed: 0 + GPU 0,1,2,3,4,5,6,7: NVIDIA A100-SXM4-80GB + CUDA_HOME: /mnt/petrelfs/share/test-cuda/cuda-12.1 + NVCC: Cuda compilation tools, release 12.1, V12.1.66 + GCC: gcc (GCC) 11.2.0 + PyTorch: 2.4.0 + PyTorch compiling details: PyTorch built with: + - GCC 9.3 + - C++ Version: 201703 + - Intel(R) oneAPI Math Kernel Library Version 2023.1-Product Build 20230303 for Intel(R) 64 architecture applications + - Intel(R) MKL-DNN v3.4.2 (Git Hash 1137e04ec0b5251ca2b4400a4fd3c667ce843d67) + - OpenMP 201511 (a.k.a. 
OpenMP 4.5) + - LAPACK is enabled (usually provided by MKL) + - NNPACK is enabled + - CPU capability usage: AVX2 + - CUDA Runtime 12.1 + - NVCC architecture flags: -gencode;arch=compute_50,code=sm_50;-gencode;arch=compute_60,code=sm_60;-gencode;arch=compute_61,code=sm_61;-gencode;arch=compute_70,code=sm_70;-gencode;arch=compute_75,code=sm_75;-gencode;arch=compute_80,code=sm_80;-gencode;arch=compute_86,code=sm_86;-gencode;arch=compute_90,code=sm_90 + - CuDNN 90.1 (built against CUDA 12.4) + - Magma 2.6.1 + - Build settings: BLAS_INFO=mkl, BUILD_TYPE=Release, CUDA_VERSION=12.1, CUDNN_VERSION=9.1.0, CXX_COMPILER=/opt/rh/devtoolset-9/root/usr/bin/c++, CXX_FLAGS= -D_GLIBCXX_USE_CXX11_ABI=0 -fabi-version=11 -fvisibility-inlines-hidden -DUSE_PTHREADPOOL -DNDEBUG -DUSE_KINETO -DLIBKINETO_NOROCTRACER -DUSE_FBGEMM -DUSE_PYTORCH_QNNPACK -DUSE_XNNPACK -DSYMBOLICATE_MOBILE_DEBUG_HANDLE -O2 -fPIC -Wall -Wextra -Werror=return-type -Werror=non-virtual-dtor -Werror=bool-operation -Wnarrowing -Wno-missing-field-initializers -Wno-type-limits -Wno-array-bounds -Wno-unknown-pragmas -Wno-unused-parameter -Wno-unused-function -Wno-unused-result -Wno-strict-overflow -Wno-strict-aliasing -Wno-stringop-overflow -Wsuggest-override -Wno-psabi -Wno-error=pedantic -Wno-error=old-style-cast -Wno-missing-braces -fdiagnostics-color=always -faligned-new -Wno-unused-but-set-variable -Wno-maybe-uninitialized -fno-math-errno -fno-trapping-math -Werror=format -Wno-stringop-overflow, LAPACK_INFO=mkl, PERF_WITH_AVX=1, PERF_WITH_AVX2=1, PERF_WITH_AVX512=1, TORCH_VERSION=2.4.0, USE_CUDA=ON, USE_CUDNN=ON, USE_CUSPARSELT=1, USE_EXCEPTION_PTR=1, USE_GFLAGS=OFF, USE_GLOG=OFF, USE_GLOO=ON, USE_MKL=ON, USE_MKLDNN=ON, USE_MPI=OFF, USE_NCCL=ON, USE_NNPACK=ON, USE_OPENMP=ON, USE_ROCM=OFF, USE_ROCM_KERNEL_ASSERT=OFF, + + TorchVision: 0.19.0 + OpenCV: 4.7.0 + MMEngine: 0.10.5 + Transformers: 4.45.1 + XTuner: 0.1.23+657d74 + Seed: 0 + World Size: 64 + DP Size: 16 + SP Size: 4 + TP Size: 1 +------------------------------------------------------------ + +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 16:53:16][INFO] Found 8 files in /mnt/petrelfs/caimengzhang/data/20b_data//base50v2-new-zh-en30w-combinev9-mls-chatbeta2 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 16:53:19][WARNING] /mnt/petrelfs/caimengzhang/data/20b_data//base50v2-new-zh-en30w-combinev9-mls-chatbeta2/data_part_1.jsonl has 4 prompt length>32768, discard. +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 16:53:21][WARNING] /mnt/petrelfs/caimengzhang/data/20b_data//base50v2-new-zh-en30w-combinev9-mls-chatbeta2/data_part_2.jsonl has 4 prompt length>32768, discard. +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 16:53:23][WARNING] /mnt/petrelfs/caimengzhang/data/20b_data//base50v2-new-zh-en30w-combinev9-mls-chatbeta2/data_part_3.jsonl has 5 prompt length>32768, discard. +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 16:53:25][WARNING] /mnt/petrelfs/caimengzhang/data/20b_data//base50v2-new-zh-en30w-combinev9-mls-chatbeta2/data_part_4.jsonl has 6 prompt length>32768, discard. +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 16:53:27][WARNING] /mnt/petrelfs/caimengzhang/data/20b_data//base50v2-new-zh-en30w-combinev9-mls-chatbeta2/data_part_5.jsonl has 2 prompt length>32768, discard. +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 16:53:29][WARNING] /mnt/petrelfs/caimengzhang/data/20b_data//base50v2-new-zh-en30w-combinev9-mls-chatbeta2/data_part_6.jsonl has 4 prompt length>32768, discard. 
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 16:53:31][WARNING] /mnt/petrelfs/caimengzhang/data/20b_data//base50v2-new-zh-en30w-combinev9-mls-chatbeta2/data_part_7.jsonl has 3 prompt length>32768, discard. +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 16:53:33][WARNING] /mnt/petrelfs/caimengzhang/data/20b_data//base50v2-new-zh-en30w-combinev9-mls-chatbeta2/data_part_8.jsonl has 1 prompt length>32768, discard. +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 16:53:36][INFO] [Dataset] (Original) 994065 samples. +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 16:53:36][INFO] [Dataset] (Packed) 37907 samples. +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 16:53:36][INFO] [Dataloader] 2372 batches. +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 16:53:37][DEBUG] [Dataloader] Training Batch: +{'input_ids': tensor([[151644, 872, 198, ..., 13, 151645, 198]]), 'labels': tensor([[ -100, -100, -100, ..., 13, 151645, -100]]), 'num_tokens': tensor([ 466, 1305, 1323, 393, 2148, 1594, 305, 256, 2759, 2079, 476, 2275, + 22, 2552, 565, 86, 743, 79, 402, 855, 3265, 608, 549, 2220, + 1421, 321, 1342, 228, 331, 171, 536], dtype=torch.int32), 'attention_mask': tensor([[True, True, True, ..., True, True, True]])} +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 16:53:37][DEBUG] [Dataloader] Training Batch(Decoded): +['<|im_start|>user\nKérem, állapítsa meg az alábbi mondat érzelmi irányultságát: \'Neutral\', \'Positive\' vagy \'Negative\', és adja meg az elemzése indokait.\n\nA mezőmadarasi Madarassy (Soltész) család kúriájának állapota az utóbbi években romlott, ami aggasztja a helyi közösséget, hiszen fontos része a település történetének és kultúrájának.<|im_end|>\n<|im_start|>assistant\nNegative. A bejegyzés a mezőmadarasi Madarassy (Soltész) család kúriájának állapotáról szól, és azt említi, hogy az utóbbi években romlott. Ez aggasztó a helyi közösség számára, amely számára a kúria fontos része a település történetének és kultúrájának. A "romlott" szó használata egyértelműen negatív konnotációval bír, és azt jelzi, hogy a kúria nem csak fizikailag, hanem esetlegesen kulturálisan is veszít az értékéből.\n\nA helyi közösség aggódása is egy negatív érzés, mivel a kúria értéke és jelentősége csökken, ami hatással van a település identitására. A történelmi és kulturális értékek megőrzése a közösségek szempontjából kulcsfontosságú, így a kúria romlása kiemelt aggályt jelent.\n\nEzen szempontok összességében a bejegyzés negatív megítélését támasztják alá.<|im_end|>\n<|im_start|>user\n如何设定一个可行的项目进度?<|im_end|>\n<|im_start|>assistant\n要设定一个可行的项目进度,您可以按照以下步骤进行:\n\n1. 确定项目目标和范围:确定项目完成后需要实现的目标和范围,这有助于您更好地了解需要完成哪些任务和工作。\n\n2. 制定项目计划:制定详细的项目计划,包括每个任务的起始和结束日期、时间表和所需资源等信息。\n\n3. 估算任务时间:对所有任务进行时间估算,以确定需要多长时间才能完成每个任务。\n\n4. 确定关键路径:确定项目中的关键路径,这是指所有任务中所需时间最长的路径。这有助于您了解哪些任务可能会影响整个项目的进度。\n\n5. 确定资源:确定项目所需的资源,包括人力资源、资金和设备等。\n\n6. 分配任务:将任务分配给团队成员,并确保每个人都理解和接受自己的责任和任务。\n\n7. 监控进度:定期监测项目进度,并记录实际进展与计划进展之间的差异。如果出现偏差,及时采取措施纠正。\n\n总之,设定可行的项目进度需要全面的计划和精细的执行,而且需要不断地监控和调整,以确保项目能够按计划顺利完成。<|im_end|>\n<|im_start|>user\n如何确定任务之间的依赖关系?<|im_end|>\n<|im_start|>assistant\n确定任务之间的依赖关系是制定项目计划的重要一步,以下是一些方法:\n\n1. 前置任务:前置任务是指某个任务必须在另一个任务之前完成。这种依赖关系可以在项目计划中使用箭头或线条表示。\n\n2. 后续任务:后续任务是指某个任务必须在另一个任务完成后开始。这种依赖关系也可以使用箭头或线条表示。\n\n3. 里程碑:里程碑是指某个重要的事件或任务的完成。���程碑可以作为其他任务的依赖点,以确保这些任务在里程碑完成之前完成。\n\n4. 并行任务:有些任务可以同时进行,而不需要等待其他任务完成。这种任务之间没有依赖关系。\n\n5. 
外部依赖:某些任务可能需要外部资源或信息才能开始或完成。这种依赖关系需要在计划中予以考虑。\n\n在制定项目计划时,建议将这些任务之间的依赖关系绘制成一个图表,以便更好地理解和管理它们。同时,还要定期监控任务的进展情况,确保每个任务都按照依赖关系顺利完成。<|im_end|>\n<|im_start|>user\n如何在项目计划中有效地分配资源?<|im_end|>\n<|im_start|>assistant\n在项目计划中有效地分配资源是确保项目成功的关键之一,以下是一些方法:\n\n1. 确定资源需求:首先需要确定项目所需的资源类型和数量,包括人力资源、设备、资金等。\n\n2. 识别瓶颈资源:了解哪些资源是项目的瓶颈,即哪些资源可能成为项目进度的限制因素,以便优先考虑这些资源的分配。\n\n3. 优先级排序:根据项目目标和重要性,将任务按优先级排序,并将资源分配到最需要的任务上。\n\n4. 平衡资源:在分配资源时,需要平衡不同任务之间的资源需求,以避免资源过度分配或不足分配的情况发生。\n\n5. 优化资源利用:优化资源利用可以有效地减少资源浪费,例如将某些资源分配给多个任务,或者利用软件和自动化工具来提高资源效率。\n\n6. 监控资源使用情况:定期监控资源的使用情况,以确保资源的合理利用,并及时调整分配方案,以确保项目能够按计划顺利完成。\n\n总之,在项目计划中有效地分配资源需要全面的计划和精细的执行,而且需要不断地监控和调整,以确保资源的合理利用和项目的顺利完成。<|im_end|>\n<|im_start|>user\n如何有效地解决项目中的风险问题?<|im_end|>\n<|im_start|>assistant\n在项目中解决风险问题是确保项目成功的关键之一,以下是一些方法:\n\n1. 风险识别:在项目计划的早期阶段,需要对可能出现的风险进行较为全面的识别,包括项目进度、质量、成本等方面。可以借助SWOT分析等工具识别风险。\n\n2. 风险评估:对识别出的每个风险进行评估,包括风险的概率、影响程度和应对措施等方面。评估结果可以帮助制定风险管理计划。\n\n3. 风险管理计划:制定详细的风险管理计划,包括针对每个风险的应对策略、责任人、时间表等信息。风险管理计划需要在项目计划中得到充分的体现。\n\n4. 风险监控:定期监控风险的出现和演变情况,例如利用流程图、Gantt图等工具,及时改变风险管理计划,以便更好地管理风险。\n\n5. 风险应对措施:针对不同的风险,需要采取不同的应对措施,例如转移风险、减轻风险、避免风险等。要根据实际情况制定风险应对措施,并及时跟进执行情况。\n\n总之,在项目中解决风险问题需要全面的计划和精细的执行,而且需要不断地监控和调整,以确保风险得到有效管理,项目能够按计划顺利完成。<|im_end|>\n<|im_start|>user\n如何在项目中确保高质量的交付成果?<|im_end|>\n<|im_start|>assistant\n在项目中确保高质量的交付成果是确保项目成功的关键之一,以下是一些方法:\n\n1. 确定质量标准:在项目计划的早期阶段,需要确定项目的质量标准和要求,包括功能、性能、可靠性等方面。这有助于确保项目的交付成果符合预期的质量标准。\n\n2. 质量计划:制定详细的质量计划,包括验收标准、测试计划、问题解决和纠正计划等方面。质量计划需要在项目计划中得到充分的体现。\n\n3. 质量控制:在项目执行过程中,需要不断地监控和控制项目的质量,例如利用检查表、测试工具等,确保交付成果符合质量标准和要求。\n\n4. 质量保证:在项目中,需要采取措施确保项目的质量,例如采用最佳实践、行业标准等,以确保交付成果符合预期的质量标准。\n\n5. 持续改进:持续改进是确保项目质量的重要手段,需要不断地反思和总结项目过程和结果,识别和改进项目中的质量问题,以不断提高交付成果的质量和效率。\n\n总之,在项目中确保高质量的交付成果需要全面的计划和精细的执行,而且需要不断地监控和调整,以确保交付成果符合预期的质量标准和要求。<|im_end|>\n<|im_start|>user\n我想让你当插画师。我会问你一些创作过程相关的问题,你要回答如何开始画,以及应该注意哪些方面。此外,你也可以提供有关故事板、构图、透明度和光泽的建议,并且可以使用不同的工具(水彩,原子笔,颜料等)创作出特定的效果。我的第一个建议请求是“我需要帮助绘制一幅风景画,包括一个小船在大海里行驶。<|im_end|>\n<|im_start|>assistant\n当开始画风景画并包括小船在大海里行驶时,以下是一些可以帮助你的建议: \n\n1. 故事板:在开始绘画任何东西之前,首先需要创建一个故事板。故事板是一个简单的草图,它有助于你确定画面中的元素和它们的位置。你可以使用纸张和铅笔来创建故事板。在这个阶段,你可以尝试不同的布局和构图,直到找到最合适的。 \n\n2. 构图:在确定好故事板之后,你需要考虑构图。你可以使用“第三点规则”来帮助你构建一个有吸引力的画面。这意味着你将画面分成九个相等的部分,然后将物体和元素放置在交叉点上,这将使画面看起来更加平衡。 \n\n3. 工具和材料:你可以使用不同的工具和材料来创造不同的效果。如果你要使用水彩颜料,你可以将海洋区域涂成深蓝色,然后使用浅蓝色和白色来创造波浪效果。对于小船,你可以使用棕色和白色来增加细节。使用原子笔可以为画面添加更多的细节,例如海鸥和云朵。 \n\n4. 透明度和光泽:如果你想要创造出水的透明感和光泽,你可以在海洋区域使用淡蓝色和白色来描绘海浪上的泡沫和浪花。对于小船,你可以使用白色添加高光效果,以增加凹凸感。 \n\n最重要的是,创作过程应该是有趣的,因此不要害怕在过程中尝试新的想法和技巧。祝你好运!<|im_end|>\n<|im_start|>user\n这些建议真的很有用,谢谢你!那么当我想要创作一个更加复杂和详细的插画时,比如一幅人物肖像画,应该注意哪些方面呢?<|im_end|>\n<|im_start|>assistant\n当你想要创作一个更加复杂和详细的插画,比如人物肖像画时,以下是一些你应该注意的方面:\n\n1. 面部特征:人物肖像画最重要的部分是面部特征。你需要特别注意眼睛、鼻子和嘴巴的位置和比例。使用参考图像或素描来帮助你确定面部特征的位置和比例。\n\n2. 光源和阴影:了解光线和阴影的原理将对创作真实的人物肖像画非常有帮助。你需要考虑光源的位置以及如何在面部特征上创建阴影和高光。\n\n3. 色彩:选择正确的色彩是创作人物肖像画的关键。使用肤色调色板来帮助你选择正确的色调和阴影颜色。你还需要考虑使用其他颜色来增加画面的深度和兴趣。\n\n4. 动态和姿势:如果你要创作一个具有动态和姿势的人物肖像画,你需要了解身体的比例和动态。使用参考图像或素描来帮助你确定身体的比例和动态。\n\n5. 材料和工具:你可以使用多种材料和工具来创造不同的效果。例如,使用颜料可以帮助你创建柔和的效果,而使用铅笔可以帮助你创造细节。\n\n最后,创作人物肖像画需要耐心和实践。不要害怕犯错,在不断练习和尝试之后,你会发现自己的技能和创造力不断提高。<|im_end|>\n<|im_start|>user\n好的,我想了解一些关于使用铅笔创作插画时需要注意哪些方面。<|im_end|>\n<|im_start|>assistant\n当你使用铅笔创作插画时,以下是一些需要注意的方面:\n\n1. 选择铅笔:铅笔有不同的硬度和软度,你需要选择合适的铅笔来创作插画。通常,HB铅笔是最常用的,但你也可以选择其他硬度和软度的铅笔来实现不同的效果。\n\n2. 使用线条:铅笔最常用于绘制线条,因此你需要注意线条的粗细和质地。使用不同程度的压力可以创造出不同粗细的线条。使用交叉线条可以创造出更多的细节和深度。\n\n3. 阴影和高光:铅笔可以用于创建阴影和高光。使用不同的铅笔硬度可以达到不同的阴影效果。使用橡皮擦可以帮助你创造出高光效果。\n\n4. 线条方向:铅笔的线条方向对插画的效果有很大的影响。你需要考虑线条的方向和位置,以确保插画看起来自然和有生命力。\n\n5. 
纸张:选择合适的纸张非常重要,因为不同的纸张可以影响铅笔的颜色和纹理。选择光滑的纸张可以帮助你创造出更精细的线条和细节,而选择粗糙的纸张可以帮助你创造出更加自然的效果。\n\n最后,使用铅笔创作插画需要耐心和练习。一旦你熟练掌握了铅笔的使用技巧,你可以开始尝试不同的风格和技巧来创造出独特的插画。<|im_end|>\n<|im_start|>user\n当我想要创作一个有关科学的插画时,应该注意哪些方面呢?<|im_end|>\n<|im_start|>assistant\n当你想要创作一个有关科学的插画时,以下是一些需要注意的方面:\n\n1. 研究和理解主题:在着手创作插画之前,你需要研究和理解你要创作的主题。了解科学原理和概念可以帮助你创造出准确和有意义的插画。\n\n2. 故事板和布局:创建故事板可以帮助你规划和布局插画。在创建故事板时,你可以考虑使用适当的比例和尺寸来突出插画的主题和重点。\n\n3. 颜色和材料:选择合适的颜色和材料可以帮助你创造出特定的效果。在创作有关科学的插画时,你可以使用科学相关的颜色和材料,例如蓝色、绿色、金属和玻璃等。\n\n4. 细节和精度:科学插画通常需要高度的精度和细节。你需要花费时间和精力来确保插画的每个元素和细节都准确和精细。\n\n5. 图例和标注:在插画中添加图例和标注可以帮助观众更好地理解科学概念和原理。使用清晰和易于理解的语言和符号可以使插画更加易于理解。\n\n最后,创作有关科学的插画需要耐心和实践。通过不断的实践和反思,你可以不断提高自己的技能和创造力,创造出更加准确和有意义的插画。<|im_end|>\n<|im_start|>user\nWhat are the potential applications of a comprehensive catalog of facial expressions and emotions?\n\nIn order to create a detailed catalog of facial expressions and emotions captured in photographs, it is imperative to establish a comprehensive register. This may involve utilizing an existing system or developing a new one that accounts for a wide range of emotions and facial expressions. This task requires meticulous attention to detail and an understanding of the nuances of nonverbal communication. The resulting catalog will provide valuable insights into the human experience and can be utilized in a variety of fields, from psychology to advertising. Achieving a comprehensive catalog of facial expressions and emotions will require patience, careful observation, and a dedication to accuracy.<|im_end|>\n<|im_start|>assistant\nThe potential applications of a comprehensive catalog of facial expressions and emotions are extensive. Here are a few examples:\n\n1. Psychology: A detailed catalog of facial expressions and emotions can aid in the diagnosis and treatment of mental health disorders. Psychologists can use this catalog to identify subtle changes in facial expressions that may indicate a shift in a patient’s emotional state.\n\n2. Anthropology: Studying facial expressions and emotions across different cultures and societies can provide insights into the human experience and how emotions are expressed and interpreted across different groups.\n\n3. Marketing and Advertising: A catalog of facial expressions and emotions can aid in the creation of more effective advertisements by helping marketers understand how different emotional states are expressed on the face and how those expressions can be used to appeal to consumers.\n\n4. Technology: Facial recognition technology is becoming more advanced and a comprehensive catalog of facial expressions and emotions can aid in the development of more accurate and reliable facial recognition software.\n\n5. Education: A catalog of facial expressions and emotions can be used in classrooms to help students understand how emotions are expressed on the face and how to interpret those expressions in others.\n\nOverall, a comprehensive catalog of facial expressions and emotions can provide valuable insights into the human experience and has the potential to impact a wide range of fields.<|im_end|>\n<|im_start|>user\nHow do I protect my data when using a chatbot?<|im_end|>\n<|im_start|>assistant\nThere are a few things you can do to protect your data when using a chatbot:\n\n1. Only share necessary information: Only provide the chatbot with the information it needs to perform its intended function. Avoid sharing sensitive information like your social security number, credit card information or passwords.\n\n2. 
Use a secure connection: Make sure the chatbot is accessed through a secure connection (i.e. HTTPS) to ensure that any information you share is encrypted and cannot be intercepted.\n\n3. Verify the chatbot\'s identity: Before sharing any information, verify that you are interacting with a legitimate chatbot. Check that the chatbot is associated with a reputable company or organization and that its website/URL is legitimate.\n\n4. Read the privacy policy: Check the chatbot\'s privacy policy to see how your data will be used and if it will be shared with any third parties.\n\n5. Avoid public Wi-Fi: Avoid using public Wi-Fi networks when interacting with a chatbot, as these networks are often unsecured and can leave your data vulnerable to hacking.\n\n6. Use strong passwords: If the chatbot requires you to create an account, use strong, unique passwords to protect your data from unauthorized access.<|im_end|>\n<|im_start|>user\nCan you recommend any tools that can help me create strong passwords for chatbots and other online accounts?<|im_end|>\n<|im_start|>assistant\nYes, there are several tools you can use to create strong passwords for chatbots and other online accounts. Here are a few options:\n\n1. LastPass: This is a popular password manager that can generate and store strong passwords for you. It also has a feature that can automatically fill in login information for you.\n\n2. 1Password: Another password manager that can generate strong passwords and store them securely. It can also alert you if any of your passwords have been compromised.\n\n3. Dashlane: A password manager that can generate strong passwords and store them securely. It also has a feature that can automatically fill in login information for you.\n\n4. KeePass: This is a free, open-source password manager that can generate strong passwords and store them securely. It can also be used offline, making it a good option if you don\'t want to rely on a cloud-based service.\n\n5. Norton Password Generator: This is a free online tool that can generate strong passwords for you. It allows you to specify the length and complexity of the password, and can also generate passwords that are easy to remember.<|im_end|>\n<|im_start|>user\nCan you explain how a chatbot uses natural language processing to understand user input?<|im_end|>\n<|im_start|>assistant\nOf course! Natural Language Processing (NLP) is a technology that enables chatbots to understand and interpret human language. Here\'s how it works:\n\n1. Tokenization: The chatbot breaks down the user\'s input into individual words and phrases, called "tokens."\n\n2. Part-of-speech tagging: The chatbot analyzes the tokens to determine their grammatical function, such as whether they are a noun, verb, or adjective.\n\n3. Parsing: The chatbot analyzes the structure of the sentence to understand how the words and phrases relate to each other.\n\n4. Named entity recognition: The chatbot identifies and categorizes specific entities in the user\'s input, such as names, dates, and locations.\n\n5. Sentiment analysis: The chatbot evaluates the emotional tone of the user\'s input, such as whether it is positive, negative, or neutral.\n\n6. 
Machine learning: The chatbot uses machine learning algorithms to learn from past interactions and improve its understanding of natural language over time.\n\nBy using NLP, chatbots are able to understand user input in a more human-like way, allowing them to provide more accurate and helpful responses.<|im_end|>\n<|im_start|>user\nCan chatbots understand multiple languages? How does that work?<|im_end|>\n<|im_start|>assistant\nYes, chatbots can be designed to understand and respond in multiple languages. Here\'s how it works:\n\n1. Language detection: When a user interacts with a chatbot, the chatbot first detects the language of the user\'s input. This is done using algorithms that analyze the patterns and structure of the text.\n\n2. Translation: Once the language of the user\'s input is detected, the chatbot translates the input into the language it is programmed to understand. This is done using machine translation algorithms that analyze the grammar and vocabulary of the text.\n\n3. Natural language processing: After the input is translated, the chatbot uses natural language processing (NLP) to understand and interpret the user\'s input, just as it would for input in a single language.\n\n4. Response generation: Once the chatbot has interpreted the user\'s input, it generates a response in the same language as the user\'s original input. The response is then translated into the user\'s language using the same machine translation algorithms.\n\nWhile chatbots can be designed to understand and respond in multiple languages, it\'s important to note that the accuracy of the translations and the quality of the responses will depend on the quality of the machine translation algorithms and the chatbot\'s NLP capabilities.<|im_end|>\n<|im_start|>user\nCan chatbots be trained to recognize specific industry jargon or technical terms?<|im_end|>\n<|im_start|>assistant\nYes, chatbots can be trained to recognize specific industry jargon or technical terms. Here\'s how it works:\n\n1. Data collection: The first step is to collect a large amount of data that contains the industry jargon or technical terms that the chatbot needs to recognize. This data can come from various sources, such as industry-specific documents, manuals or websites.\n\n2. Data labeling: Once the data is collected, it needs to be labeled to identify which words or phrases are industry jargon or technical terms. This can be done manually or using machine learning algorithms.\n\n3. Machine learning: The chatbot can then be trained using machine learning algorithms that analyze the labeled data and learn to recognize the industry jargon or technical terms.\n\n4. Natural language processing: Once the chatbot has been trained, it can use natural language processing (NLP) to understand and interpret user input that contains the industry jargon or technical terms.\n\nBy training chatbots to recognize industry jargon or technical terms, they can provide more accurate and helpful responses to users in that particular industry or field. This can improve the overall user experience and increase the efficiency of the chatbot in performing its intended function.<|im_end|>\n<|im_start|>user\nCan chatbots be integrated with other AI technologies like machine learning or computer vision?<|im_end|>\n<|im_start|>assistant\nYes, chatbots can be integrated with other AI technologies like machine learning or computer vision. Here\'s how it works:\n\n1. 
Machine learning: Chatbots can be trained using machine learning algorithms to improve their understanding of natural language and to personalize their responses based on user behavior. For example, a chatbot for an e-commerce website can learn from past interactions to provide personalized product recommendations to users.\n\n2. Computer vision: Chatbots can also be integrated with computer vision technology to enable them to recognize and interpret visual data. For example, a chatbot for a home security system can use computer vision to analyze live video feeds and alert users if it detects any suspicious activity.\n\n3. Speech recognition: Chatbots can be designed to recognize and respond to voice commands using speech recognition technology. This can be particularly useful in situations where users need to use their hands for other tasks, such as driving a car.\n\n4. Natural language understanding: Chatbots can also be integrated with natural language understanding (NLU) technology to enable them to understand and interpret user input more accurately. NLU can help chatbots recognize and respond to variations in user input, such as different dialects or accents.\n\nBy integrating chatbots with other AI technologies, businesses can create more powerful and sophisticated tools that can provide more personalized and efficient services to users.<|im_end|>\n<|im_start|>user\nCan chatbots be programmed to recognize sarcasm and respond appropriately?<|im_end|>\n<|im_start|>assistant\nYes, chatbots can be programmed to recognize sarcasm and respond appropriately, although it can be challenging due to the subtle and nuanced nature of sarcasm. Here\'s how it works:\n\n1. Sentiment analysis: Chatbots use sentiment analysis to determine the emotional tone of a user\'s input, including detecting sarcasm. This involves analyzing the language used, the context of the conversation, and other factors to determine if the user\'s input is sarcastic or not.\n\n2. Contextual analysis: Chatbots also use contextual analysis to understand the meaning behind the user\'s input. This involves analyzing the conversation history, the user\'s profile information, and any other relevant data to determine the intended meaning of the user\'s input.\n\n3. Response generation: Once the chatbot has determined that the user\'s input is sarcastic, it can generate an appropriate response. This response could be humorous, or it could acknowledge the sarcasm and provide a serious response.\n\nWhile chatbots can be programmed to recognize sarcasm, it\'s important to note that there is no guarantee that they will always get it right. Sarcasm is complex and can be difficult to interpret, even for humans. Nonetheless, with the right programming and algorithms, chatbots can be designed to recognize and respond to sarcasm in a way that is engaging and appropriate.<|im_end|>\n<|im_start|>user\nCan chatbots be programmed to understand and respond appropriately to different cultural contexts?<|im_end|>\n<|im_start|>assistant\nYes, chatbots can be programmed to understand and respond appropriately to different cultural contexts. Here\'s how it works:\n\n1. Cultural awareness: Chatbots need to be programmed with an understanding of different cultural norms and customs. This involves understanding the language, social norms, and values of the culture they are interacting with.\n\n2. Contextual analysis: Chatbots need to analyze the context of the conversation to determine the appropriate response. 
This involves taking into account the user\'s cultural background, previous interactions, and any other relevant information to provide a culturally appropriate response.\n\n3. Personalization: Chatbots can be programmed to personalize their responses based on the user\'s cultural background. This can include using appropriate greetings, addressing the user by their preferred name or title, and using appropriate language and tone.\n\n4. Localization: Chatbots can be localized to different regions, which involves translating the chatbot\'s responses into the appropriate language and adapting them to the local cultural context.\n\nBy programming chatbots to understand and respond appropriately to different cultural contexts, businesses can provide a more personalized and engaging experience for users from different cultural backgrounds. This can improve customer satisfaction and help businesses build stronger relationships with their customers.<|im_end|>\n<|im_start|>user\n如何在Python中使用递归来解决Fibonacci序列问题<|im_end|>\n<|im_start|>assistant\nFibonacci序列是一个经典的递归问题。在Python中,使用递归函数来解决Fibonacci序列问题非常简单。以下是一个示例代码,用于计算Fibonacci序列中的第n个数字。\n\n```python\ndef fibonacci(n):\n if n <= 1:\n return n\n else:\n return fibonacci(n-1) + fibonacci(n-2)\n```\n\n在这个递归函数中,如果n小于或等于1,则返回n本身。否则,通过调用函数本身来计算前两个数字的和。这个递归函数可以计算任何Fibonacci序列中的任何数字。\n\n例如,要计算Fibonacci序列中的第10个数字,可以调用`fibonacci(10)`。<|im_end|>\n<|im_start|>user\n这个递归函数能计算多大的Fibonacci数列?<|im_end|>\n<|im_start|>assistant\n这个递归函数能计算的Fibonacci数列的大小取决于计算机的内存和性能。由于递归函数的特性,每次调用函数时都会创建一个新的函数栈帧,因此当计算Fibonacci序列的数字变得非常大时,这个递归函数可能会导致栈溢出或计算时间非常长。\n\n在实际应用中,如果需要计算非常大的Fibonacci数列,可以使用其他更高效的算法,例如迭代方法或矩阵乘法。这些方法可以更快地计算大型Fibonacci数列,而不会导致栈溢出或计算时间过长。<|im_end|>\n<|im_start|>user\n那么,你能否向我介绍一下使用迭代方法来计算Fibonacci数列的代码实现呢?<|im_end|>\n<|im_start|>assistant\n当需要计算非常大的Fibonacci数列时,使用迭代方法比使用递归函数更高效,因为迭代方法不会导致栈溢出,并且计算时间更短。以下是一个使用迭代方法来计算Fibonacci数列的示例代码:\n\n```python\ndef fibonacci(n):\n if n <= 1:\n return n\n else:\n a, b = 0, 1\n for i in range(n-1):\n a, b = b, a + b\n return b\n```\n\n在这个迭代函数中,如果n小于或等于1,则返回n本身。否则,使用两个变量a和b来计算Fibonacci数列中的每个数字。在每个循环迭代中,将a和b的值更新为b和a+b,直到计算到第n个数字为止。最后返回b的值即可。\n\n与递归函数不同,这个迭代函数不会导致栈溢出,因此可以计算非常大的Fibonacci数列。<|im_end|>\n<|im_start|>user\n在Python中如何实现一个二分查找算法呢?<|im_end|>\n<|im_start|>assistant\n二分查找是一种常用的查找算法,它可以在已经排好序的数组(或列表)中快速地查找特定的值。以下是一个使用Python实现的简单二分查找算法:\n\n```python\ndef binary_search(arr, target):\n low, high = 0, len(arr) - 1\n while low <= high:\n mid = (low + high) // 2\n if arr[mid] == target:\n return mid\n elif arr[mid] < target:\n low = mid + 1\n else:\n high = mid - 1\n return -1\n```\n\n这个函数接受两个参数:一个已经排好序的数组(或列表)arr和需要查找的目标值target。函数使用两个变量low和high来跟踪数组中目前正在查找的部分。在每个循环迭代中,计算中间值mid,并比较mid与目标值target的大小关系,从而将查找范围缩小到较小的一半。如果mid等于target,则返回mid的索引。否则,如果mid小于target,则将low更新为mid+1,否则将high更新为mid-1。如果在查找范围中找不到目标值,则返回-1。\n\n注意,这个二分查找算法的前提是数组已经排好序了。如果数组没有排好序,则需要先对数组进行排序,然后再使用二分查找算法。<|im_end|>\n<|im_start|>user\n我想了解一下Python中的列表推导式是什么?<|im_end|>\n<|im_start|>assistant\n在Python中,列表推导式是一种快速创建列表的方法。它允许您使用简洁的语法从一个可迭代对象中创建一个新的列表。以下是一个简单的示例:\n\n```python\nnumbers = [1, 2, 3, 4, 5]\nsquares = [x**2 for x in numbers]\nprint(squares)\n```\n\n这个列表推导式将一个数字列表中的每个元素平方,并创建一个新的列表。输出结果为:\n\n```\n[1, 4, 9, 16, 25]\n```\n\n列表推导式的语法非常简单:在中括号内部,指定一个表达式,然后使用一个for循环来遍历可迭代对象。在for循环中,您可以使用表达式来转换可迭代对象的元素,并将结果存储在新的列表中。\n\n您还可以添加if语句来过滤可迭代对象的元素。例如,以下示例从一个数字列表中选择所有偶数,并创建一个新的列表:\n\n```python\nnumbers = [1, 2, 3, 4, 5]\nevens = [x for x in numbers if x % 2 == 0]\nprint(evens)\n```\n\n输出结果为:\n\n```\n[2, 
4]\n```\n\n在这个列表推导式中,使用if语句来过滤数字列表中的奇数。只有当数字可以被2整除时,才将数字添加到新列表中。<|im_end|>\n<|im_start|>user\n除了使用列表推导式,Python中还有哪些快速创建序列的方法呢?<|im_end|>\n<|im_start|>assistant\n除了列表推导式,Python中还有很多其他快速创建序列的方法。以下是其中的一些示例:\n\n1. 使用range()函数创建数字序列\n\n您可以使用内置的range()函数创建数字序列。例如,以下代码创建一个数字序列,其中包含从0到9(不包括10)的整数:\n\n```python\nnumbers = list(range(10))\nprint(numbers)\n```\n\n输出结果为:\n\n```\n[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]\n```\n\nrange()函数可以接受一个或多个参数,用于指定数字序列的开始、结束和步长。\n\n2. 使用*运算符创建重复序列\n\n您可以使用*运算符创建一个重复序列。例如,以下代码创建一个由5个空字符串组成的列表:\n\n```python\nstrings = [\'\'] * 5\nprint(strings)\n```\n\n输出结果为:\n\n```\n[\'\', \'\', \'\', \'\', \'\']\n```\n\n这个方法可以用于创建任何重复元素的序列。\n\n3. 使用生成器表达式创建序列\n\n类似于列表推导式,生成器表达式也可以用于创建序列。但是,生成器表达式返回的是一个生成器对象,而不是一个列表。这意味着,生成器表达式可以在需要时逐个生成元素,从而减少内存使用。\n\n例如,以下代码使用生成器表达式创建一个包含所有偶数的生成器对象:\n\n```python\nevens = (x for x in range(10) if x % 2 == 0)\nprint(list(evens))\n```\n\n输出结果为:\n\n```\n[0, 2, 4, 6, 8]\n```\n\n这个方法可以用于创建任何需要逐个生成元素的序列。<|im_end|>\n<|im_start|>user\nA student wants to find the average value of the function f(x) = sin(x) on the interval [0,2π]. What is the average value of f(x)?<|im_end|>\n<|im_start|>assistant\nTo find the average value of a function on a given interval, we can use the formula:\n\nAverage value = (1/(b-a)) * ∫[a, b] f(x) dx\n\nIn this case, the function is f(x) = sin(x), and the interval is [0, 2π]. So, a = 0 and b = 2π.\n\nAverage value = (1/(2π-0)) * ∫[0, 2π] sin(x) dx\n\nNow, we need to find the integral of sin(x) from 0 to 2π:\n\n∫ sin(x) dx = -cos(x) + C\n\nNow, we\'ll evaluate the integral at the bounds:\n\n[-cos(2π) + cos(0)] = [-cos(2π) + 1] = [1 - 1] = 0\n\nSo, the integral of sin(x) from 0 to 2π is 0.\n\nNow, we\'ll plug this back into the average value formula:\n\nAverage value = (1/(2π-0)) * 0 = 0\n\nTherefore, the average value of f(x) = sin(x) on the interval [0, 2π] is 0.<|im_end|>\n<|im_start|>user\nFind the period and amplitude of the trigonometric function f(x) = 5 cos(2x - π/4).<|im_end|>\n<|im_start|>assistant\nTo find the period and amplitude of the given trigonometric function f(x) = 5 cos(2x - π/4), we need to identify the key features of the function.\n\nAmplitude:\nThe amplitude of a trigonometric function is the absolute value of the coefficient of the cosine or sine function. In this case, the coefficient of the cosine function is 5. Therefore, the amplitude is |5| = 5.\n\nPeriod:\nThe period of a trigonometric function is the length of one complete cycle of the function. For a cosine or sine function, the period is given by the formula:\n\nPeriod = (2π) / |B|\n\nwhere B is the coefficient of the variable x inside the cosine or sine function. In this case, B = 2. Therefore, the period is:\n\nPeriod = (2π) / |2| = (2π) / 2 = π\n\nSo, the amplitude of the function f(x) = 5 cos(2x - π/4) is 5, and the period is π.<|im_end|>\n<|im_start|>user\nÍrj egy rövid történetet a léghajókról. írd le, hogy mikor készültek először!<|im_end|>\n<|im_start|>assistant\nTíz híres léghajó - Háború Művészete\nArt of WAR | 2015. 10. 06., 10:14\n2361 | Elolvasva\nAz első kormányozható léghajó\nA hőlégballonhoz hasonlóan az első kormányozható légi jármü is Franciaországban emelkedett a levegőben. A léghajó ősét Henri Giffard építette meg 1852-ben. 
A kezdetleges léghajót egy kis teljesítményü gőzgép hajtotta, és gyorsnak sem volt éppen nevezhető a maga kilenc kilométer per órás végsebességével, mégis hatalmas áttörést jelentett a korábbi hőlégballonokhoz képest, amelyek arra tudtak csupán repülni, amerre a szél vitte őket.\nAz első "igazi" léghajó\nAz első valóban használható léghajóra egészen a századfordulóig kellett várni. A Ferdinand von Zeppelin gróf által benyújtott szabadalom alapján 1899-ben kezdték el építeni Friedrichshafen mellett egy úszó hangárban a Boden-tavon. Zeppelin a léghajó tervét a magyar születésü Schwarz Dávid özvegyétől vásárolta meg, majd ezt fejlesztette tovább. Az LZ-1 nevü léghajó 1900. július 2-án emelkedett először a levegőbe. Meghajtásáról két, egyenként tizennégy lóerős Daimler benzinmotor gondoskodott, alakja pedig már emlékeztetett a később elterjedt szivar formájú léghajókéra.\nLéghajók a háborúban\nA német hadsereg szinte a kezdetektől érdeklődött a léghajók iránt. Mire kitört az első világháború, már számos példány szolgált a seregben, amelyek alkalmazásától nagyszabású eredményeket várt a vezérkar. Bár ezeket az irreális reményeket végül nem váltották valóra, a léghajók képesek voltak elérni és bombázni Londont, emellett számos felderítő repülést hajtottak végre. Szintén Anglia elleni bombatámadásra indult az L33 jelzésü léghajó is, amelyet a brit légvédelem tüze eltalálta, és Little Wigboroughnál lezuhant. Az angolok megvizsgálták a roncsot, és a motort is sikerült megmenteniük, a tapasztalatokat felhasználva pedig sikerült modernebb léghajókat építeniük. A világháborús katonai léghajózásnak van magyar vonatkozása is. Az egyetlen magyar léghajókapitány Poppe Kornél volt, aki egyebek mellett Bukarestet is bombázta a háború alatt.\nLZ104 – a csapatszállító világrekorder\nAz LZ104 –es léghajó példája bizonyította, hogy milyen nagyot fejlődött a léghajózás a háború alatt. A Bulgáriába telepített LZ104 arra kapott utasítást 1917 novemberében, hogy szállítson erősítést a Német Kelet-Afrikában harcoló német csapatoknak. A léghajó, fedélzetén a katonákkal útnak is indult, de mire célba ért volna, az angolok győztek a gyarmaton folyó harcban, így az LZ 104-nek vissza kellett fordulnia. Mindazonáltal a léghajónak sikerült megdönteni a távrepülés korabeli rekordját, mivel a közel hétezer kilométeres távot kilencvenöt óra alatt teljesítette.\nAz első héliummal töltött léghajó\nA USS Shenandoah két szempontból is első volt. Ez volt az első merevvázas léghajó, amelyet az Egyesült Államok területén raktak össze, és az első olyan léghajó, amelyet a robbanékony hidrogén helyett a biztonságos héliummal töltöttek fel. Ez a korabeli feltételek mellett nem kis bravúr volt, ugyanis a hélium ritka és drága gáznak számított. A készletek szükössége miatt a legénységnek takarékoskodni kellett a gázzal, és ha egy másik léghajót is fel akartak tölteni, a Shenandoah-ból kellett kiszivattyúzni a héliumot, mivel az USA-nak akkoriban nem volt akkora hélium-készlete, hogy egyszerre két léghajót is megtöltsenek vele.\nÉgi anyahajó\nA léghajóból kialakított repülő anyahajó ötlete az Egyesült Államokban vetődött fel az első világháború utáni években. Mivel a léghajók gyorsabbak voltak és kevesebbet fogyasztottak, mint a vízen közlekedő társaik, így megpróbálták a gyakorlatba is átültetni a repülőgép-hordozó léghajó terveit. Először a Los Angeles-t alakították át erre a célra. A kétszáz méteres léghajó belsejében külön hangárt alakítottak ki a repülőgépek számára. 
A könnyebb üzemeltetés kedvéért új repülőgéptípust is kifejlesztettek, a Curtiss F9C "Sparrowhawk" kis szárnyfesztávolságú, könnyü gép volt, ezekből négyet helyeztek el a léghajóban.\nA történelem legnagyobb léghajóbalesetét okozta az óriási repülőgép-hordozó\nAz 1931-ben elkészült USS Akron-t a kezdetektől repülőgép-hordozónak szánták. A hatalmas léghajó mérete alig maradt el a Hindenburgétól, szállító kapacitása meghaladta a nyolcvan tonnát, sebessége pedig a százharminc kilométer per órát is. Az óriás égi anyahajó müködését azonban folyamatos balesetek kísérték. Volt, hogy a szél csapta földhöz, és olyan is, hogy elszakította a horgonyát és nekiütközött a hangárnak. Az utolsó, végzetes balesete szomorú rekordot állított fel – a mai napig ez a léghajózás történetének legnagyobb balesete. A hatalmas léghajó viharba került az Atlanti-óceán fölött, ahol a szél miatt kormányozhatatlanná vált és a vízbe zuhant hetvenhárom ember halálát okozva.\nLéghajóval a Föld körül\n1928-ban került ki a gyárból az LZ-127-es léghajó, amelyet Ferdinand Zeppelin tiszteletére a Graf Zeppelin nevet kapta. Akkoriban ez volt a világ legnagyobb légi jármüve. A Graf Zeppelin számos hosszú út után hatalmas feladatra vállalkozott – elindult, hogy megkerülje a Földet. Míg a híres Verne-regény főszereplőjének, Phileas Foggnak nyolcvan napig tartott Föld körüli útja, a léghajónak ehhez huszonegy napra, öt órára és harmincegy percre volt csupán szüksége.\nA Hindenburg katasztrófájával véget ért a léghajók kora\n1936. március 4-én emelkedett a levegőbe minden idők legnagyobb léghajója. A Hindenburgról elnevezett óriást az akkor már életben lévő katonai embargó miatt nem tudták a biztonságos héliummal megtölteni, amit az USA már nem szállított a náci Németországnak. Ezért a monstrumot a veszélyes hidrogénnel töltötték fel, amely később aztán a vesztét okozta. A hatalmas léghajó legfontosabb feladata utasszállítás volt a transzatlanti útvonalon. Éppen egy ilyen úton történt az a végzetes katasztrófa is, amely a vesztét okozta. 1937. május 6-án éppen kikötött az amerikai Lakehurstben, amikor tüzet fogott és alig pár másodperc alatt porig égett.\nMég repült néhányszor a Hindenburg testvérhajója\nA Hindenburg katasztrófája gyakorlatilag megpecsételte a léghajózás sorsát, de testvérhajója, a Graf Zeppelin II még teljesített néhány küldetést. A hidrogénnel töltött léghajót utasszállításra már nem használták, hanem átadták a Luftwaffénak. A mielőtt kitört volna a második világháború, számos próbarepülést is végzett, többségét a lengyel határ közelében. Egy ilyen repülés során át is "tévedt" a szomszédos ország területére, ahonnan a lengyel légierő tessékelte vissza a betolakodót.<|im_end|>\n<|im_start|>user\n怎样利用Python设计面向对象的函数?<|im_end|>\n<|im_start|>assistant\n在Python中,可以使用类和对象来实现面向对象编程。以下是使用Python设计面向对象的函数的步骤:\n\n1. 定义类:使用class关键字定义一个类,类名通常以大写字母开头。在类中可以定义属性和方法。\n\n```python\nclass MyClass:\n def __init__(self, name):\n self.name = name\n \n def say_hello(self):\n print("Hello, " + self.name)\n```\n\n2. 创建对象:使用类名创建一个对象。可以通过对象访问类中的属性和方法。\n\n```python\nmy_object = MyClass("Alice")\nmy_object.say_hello() # 输出:Hello, Alice\n```\n\n3. 继承类:可以通过继承一个已有的类来创建一个新的类。子类可以重写父类的方法或者添加新的方法。\n\n```python\nclass MyChildClass(MyClass):\n def say_hello(self):\n print("Bonjour, " + self.name)\n \n def say_goodbye(self):\n print("Au revoir, " + self.name)\n```\n\n4. 
重载运算符:可以通过在类中定义特殊的方法来重载运算符。例如,可以重载"+"运算符来实现两个对象的加法。\n\n```python\nclass MyNumber:\n def __init__(self, value):\n self.value = value\n \n def __add__(self, other):\n return MyNumber(self.value + other.value)\n```\n\n使用上述方法可以方便地设计面向对象的函数。<|im_end|>\n<|im_start|>user\nPython的类中有哪些特殊方法可以被重载?<|im_end|>\n<|im_start|>assistant\nPython中的类可以重载许多特殊方法,以使用自定义行为来支持类的实例化、比较、算术运算、迭代等操作。以下是一些常见的特殊方法:\n\n- `__init__(self[, args...])`: 构造函数,用于初始化新对象。当新对象创建时自动调用。\n- `__str__(self)`: 定义类的字符串表示形式。当使用 `print()` 函数或 `str()` 函数打印对象时调用。\n- `__repr__(self)`: 定义类的“官方”字符串表示形式。当使用 `repr()` 函数打印对象时调用。\n- `__eq__(self, other)`: 用于判断两个对象是否相等。当使用 `==` 运算符比较两个对象时调用。\n- `__lt__(self, other)`: 用于比较两个对象大小。当使用 `<` 运算符比较两个对象时调用。\n- `__add__(self, other)`: 定义两个对象相加的行为。当使用 `+` 运算符将两个对象相加时调用。\n- `__sub__(self, other)`: 定义两个对象相减的行为。当使用 `-` 运算符将两个对象相减时调用。\n- `__iter__(self)`: 定义可迭代对象的行为。当使用 `for...in` 循环迭代对象时调用。\n- `__next__(self)`: 定义迭代器的行为。当使用 `next()` 函数迭代对象时调用。\n- `__len__(self)`: 定义类的长度。当使用 `len()` 函数计算对象长度时调用。\n\n以上是一些常用的特殊方法,还有许多其他的特殊方法可以被重载以实现自定义行为。<|im_end|>\n<|im_start|>user\n我可以在Python中使用哪些方式来实现多重继承?<|im_end|>\n<|im_start|>assistant\n在Python中,可以使用以下三种方式来实现多重继承:\n\n1. 多个父类放在类定义的括号中,用逗号分隔:\n\n```python\nclass MyClass(ParentClass1, ParentClass2, ...):\n # class definition here\n```\n\n这种方式称为“直接多重继承”,可以同时继承多个父类的属性和方法。\n\n2. 通过调用 `super()` 函数来实现继承:\n\n```python\nclass MyClass(ParentClass1, ParentClass2, ...):\n def __init__(self, args):\n super().__init__(args)\n # additional initialization here\n```\n\n这种方式称为“使用 `super()` 函数的多重继承”,它使用 `super()` 函数来调用每个父类的方法。\n\n3. 使用混入类(Mixins)来组合多个父类:\n\n```python\nclass MixinClass1:\n # Mixin class definition here\n \nclass MixinClass2:\n # Mixin class definition here\n \nclass MyClass(ParentClass, MixinClass1, MixinClass2):\n # class definition here\n```\n\n这种方式称为“混入类的多重继承”,它使用混入类来组合多个父类的属性和方法。\n\n需要注意的是,多重继承可能会导致一些问题,如方法重名、属性冲突等。因此,在使用多重继承时需要谨慎考虑,避免出现不必要的复杂性。<|im_end|>\n<|im_start|>user\nPython中的Mixins是什么?它们有哪些用途和优势?<|im_end|>\n<|im_start|>assistant\n在Python中,Mixin是一个类,它不是为了独立地实例化或继承而设计的,而是为了通过多重继承的方式,向其他类添加新的方法或属性。Mixin类通常只包含一组相关的方法或属性,可以被多个类继承或组合使用。\n\nMixin的优势在于它可以实现代码复用和解耦,避免了重复编写相似代码的问题。同时,使用Mixin还可以实现代码的模块化和灵活性,使得代码更加可读、可维护和可扩展。\n\n下面是一些Mixin的常见用途:\n\n1. 提供额外的方法和属性:Mixin可以添加新的方法和属性,以扩展已有的类的功能。\n\n2. 实现特定的接口:Mixin可以实现特定的接口,以满足类的需求。例如,可以实现 `__str__()` 方法来提供对象的字符串表示形式。\n\n3. 实现某种行为:Mixin可以实现某种特定的行为,以提供更灵活的代码结构。例如,可以实现 `ComparableMixin` 类来实现对象之间的比较。\n\n下面是一个例子,演示如何使用Mixin实现一个可序列化的类:\n\n```python\nimport json\n\nclass SerializableMixin:\n def to_json(self):\n return json.dumps(self.__dict__)\n\nclass Person(SerializableMixin):\n def __init__(self, name, age):\n self.name = name\n self.age = age\n\nperson = Person("Alice", 30)\njson_str = person.to_json()\nprint(json_str) # 输出:{"name": "Alice", "age": 30}\n```\n\n在上面的例子中,`SerializableMixin` 类提供了 `to_json()` 方法,将对象转换成JSON字符串的形式。`Person` 类继承了 `SerializableMixin` 类,从而获得了 `to_json()` 方法。通过这种方式,我们可以轻松地将 `Person` 对象转换成JSON字符串的形式。<|im_end|>\n<|im_start|>user\nPython中的装饰器是什么?它们有哪些用途和特点?<|im_end|>\n<|im_start|>assistant\n在Python中,装饰器是一种函数或类,它可以用于修改或扩展其他函数或类的行为。装饰器可以像其他函数或类一样定义、传递、调用和返回,它们可以接受其他函数或类作为参数并返回修改后的函数或类。装饰器通常使用 `@decorator_name` 或 `decorated_function = decorator_name(decorated_function)` 的语法来应用到其他函数上。\n\n装饰器的优势在于它们可以实现代码的可重用性、可扩展性和可维护性,使得代码更加简洁、优雅和灵活。\n\n下面是一些装饰器的常见用途和特点:\n\n1. 
函数修饰:装饰器可以用于修饰函数,以实现函数的参数检查、性能分析、日志记录、缓存和权限控制等功能。例如,可以使用装饰器实现一个计时器,来统计函数的执行时间:\n\n```python\nimport time\n\ndef timer(func):\n def wrapper(*args, **kwargs):\n start_time = time.time()\n result = func(*args, **kwargs)\n end_time = time.time()\n print("{} took {} seconds to run".format(func.__name__, end_time - start_time))\n return result\n return wrapper\n\n@timer\ndef my_function():\n time.sleep(2)\n\nmy_function() # 输出:my_function took 2.0000998973846436 seconds to run\n```\n\n在上面的例子中,`timer` 装饰器接受一个函数作为参数,返回一个新的函数 `wrapper`,该函数在执行原函数之前和之后记录时间。通过 `@timer` 语法将装饰器应用到 `my_function()` 函数上,从而实现了函数的计时功能。\n\n2. 类修饰:装饰器可以用于修饰类,以实现类的属性和方法的修改和扩展。例如,可以使用装饰器实现一个缓存机制,来缓存类的方法的结果,以提高性能。\n\n```python\ndef cache(func):\n cache = {}\n def wrapper(*args):\n if args in cache:\n return cache[args]\n result = func(*args)\n cache[args] = result\n return result\n return wrapper\n\nclass MyClass:\n @cache\n def my_method(self, arg):\n # expensive computation here\n```\n\n在上面的例子中,`cache` 装饰器接受一个方法作为参数,返回一个新的方法 `wrapper`,该方法在执行原方法之前和之后检查缓存。通过 `@cache` 语法将装饰器应用到 `MyClass.my_method()` 方法上,从而实现了方法的缓存功能。\n\n装饰器的特点在于它们可以嵌套、组合和链式调用,可以实现更加复杂和灵活的功能。同时,装饰器也可以通过类来实现,从而实现更加面向对象的设计。<|im_end|>\n<|im_start|>system\n请参考以下互联网知识回复用户最新的问题,参考程度为:高,在生成时注明所引用的知识的角标,前缀为[no_ref]的知识无需注明。\n[no_ref]可是他活下来了,平稳恢复中。很好很爱笑,一侧手脚还在康复中他父母很坚强乐观。我们来自全国各地乃至世界各地,自发帮助他家。大部分人并不富裕,每月几十,一百元的自发捐助他的医疗康复费。每次生活中有不愉快,去看看他的微笑照片,孩童纯真的微笑,像从未受过伤害。我就觉得这世界很好,生命是个奇迹。\n\n[no_ref]现实不美好,也不残酷。 它就只是现实而已。 无心的随手一答,这么多赞... 其实我这人性格很差嘴很贱,看到有评论说看到这个答案感受到了一丝温暖 谢谢 真的谢谢\n\n[no_ref]对他来说,这现实简直太美好啦。———缉毒是一项长期、复杂、艰苦、危险的行动。在「缉毒」与「贩毒...<|im_end|>\n<|im_start|>user\n现实可以有多美好?<|im_end|>\n<|im_start|>assistant\n现实的美好程度值得被许多因素所影响和决定,主要取决于个体的视角和经历。对于一些人来说,即使在遭遇困难或痛苦的经历后,他们也能找到希望和勇气,感受到生活的美好。如上述第一条,一个孩子经历了病痛,但他的乐观和坚韧让他找到了希望,并且引来了来自全国甚至全球的爱心援助,每一份微小的帮助都是生活的美好。\n\n尽管现实有时可能并不如人意,但也正如信息二所述,"现实不美好,也不残酷。它就只是现实而已。" 每个人对现实美好的理解和感受是千差万别的,这取决于个人的态度、价值观和生存环境。\n\n然而,对于一些专业人士或是处在特定情景中的人来讲,他们的现实可能因成功瞬间或是艰难克服的挑战而变得尤其美好,如信息三中对缉毒警察来说的那样,他们成功完成任务之后的现实可能就异常美好。\n\n总的来说,现实的美好是相对的,而且完全取决于如何去看待和处理你所面对的现实。也就是说,正如一个古老的说法, "美在观者之眼"。<|im_end|>\n<|im_start|>user\nImplement a data structure in C++ capable of storing key-value pairs<|im_end|>\n<|im_start|>assistant\nOne commonly used data structure for storing key-value pairs in C++ is the std::map container. It is implemented as a balanced binary search tree and allows for efficient access, insertion, and deletion of elements based on their keys. Here\'s an example code snippet:\n\n```c++\n#include <map>\n#include <string>\n\nint main() {\n // Create an empty map\n std::map<std::string, int> myMap;\n\n // Insert key-value pairs into the map\n myMap.insert({"apple", 3});\n myMap.insert({"banana", 2});\n myMap.insert({"orange", 5});\n\n // Access values in the map using keys\n int numApples = myMap["apple"]; // numApples is 3\n\n // Update values in the map\n myMap["banana"] = 4;\n\n // Remove key-value pairs from the map\n myMap.erase("orange");\n\n return 0;\n}\n```\n\nIn this example, the std::map is templated with a key type of std::string and a value type of int. The insert() function is used to add key-value pairs to the map, and the [] operator is used to access and modify values by key. The erase() function removes key-value pairs from the map.<|im_end|>\n<|im_start|>user\nCan you explain the differences between std::map and std::unordered_map in C++?<|im_end|>\n<|im_start|>assistant\nYes, the main differences between `std::map` and `std::unordered_map` in C++ are:\n\n1. 
**Implementation:** `std::map` is implemented as a balanced binary search tree, while `std::unordered_map` is implemented as a hash table.\n\n2. **Ordering:** `std::map` maintains the order of elements based on their keys, while `std::unordered_map` does not guarantee any particular order of elements.\n\n3. **Search time:** `std::map` has a logarithmic search time complexity, while `std::unordered_map` has an average constant search time complexity. However, the worst-case time complexity for `std::unordered_map` can be linear.\n\n4. **Memory usage:** `std::unordered_map` typically uses more memory than `std::map`, due to the need for additional buckets and hash values.\n\n5. **Iterator stability:** `std::map` iterators remain valid even after insertions and deletions, while `std::unordered_map` iterators may become invalid if the hash table is resized.\n\nIn general, if you need to maintain order or if you have relatively small data sets, `std::map` may be a good choice. If you need faster search times or have larger data sets, `std::unordered_map` may be a better choice. However, the choice ultimately depends on your specific use case and performance needs.<|im_end|>\n<|im_start|>user\nCan you give an example of a use case where std::map would be more suitable than std::unordered_map?<|im_end|>\n<|im_start|>assistant\nYes, here\'s an example use case where `std::map` would be more suitable than `std::unordered_map`:\n\nSuppose you are building a dictionary application that needs to store a large number of English words and their definitions. You want to be able to quickly look up the definition of a given word. However, you also want to be able to display the words in alphabetical order.\n\nIn this case, you could use a `std::map` with the keys being the words and the values being the definitions. Since `std::map` maintains the order of the elements based on their keys, you can easily display the words in alphabetical order. Additionally, since the number of English words is not very large, the logarithmic search time complexity of `std::map` would not be a significant issue.\n\nOn the other hand, if you were building a spell checker that needs to check a large number of words against a dictionary, you may want to use an `std::unordered_map` for faster search times. In this case, maintaining order is not important, and the constant-time search complexity of `std::unordered_map` would be beneficial.<|im_end|>\n<|im_start|>user\nAre there any other data structures in C++ that can be used to store key-value pairs?<|im_end|>\n<|im_start|>assistant\nYes, there are several other data structures in C++ that can be used to store key-value pairs, including:\n\n1. **std::set** - A set is another associative container in C++ that stores unique elements in a sorted order. It can be used to store key-value pairs where the keys are unique and the values are not.\n\n2. **std::multiset** - A multiset is similar to a set, but allows for duplicate keys to be stored.\n\n3. **std::unordered_set** - An unordered_set is an unordered collection of unique elements. It can be used to store key-value pairs where the keys are unique and the order of the elements is not important.\n\n4. **std::vector<std::pair<Key, Value>>** - A vector of pairs is a simple way to store key-value pairs in C++. It can be used when the number of elements is known in advance and there are no requirements for efficient search, insertion, or deletion operations.\n\n5. 
**boost::unordered_map** - The Boost C++ library provides an unordered_map container that is similar to the std::unordered_map, but with some additional features and performance enhancements.\n\nThe choice of data structure depends on the specific needs and constraints of the problem being solved.<|im_end|>\n<|im_start|>user\nCan you explain how the std::map and std::unordered_map containers handle collisions?<|im_end|>\n<|im_start|>assistant\nYes, `std::map` and `std::unordered_map` handle collisions in different ways.\n\nIn `std::map`, collisions are not an issue since the elements are stored in a balanced binary search tree. Each node in the tree is sorted by its key, which ensures that there are no duplicate keys. If a key is inserted that already exists in the tree, the existing value is simply overwritten.\n\nOn the other hand, `std::unordered_map` uses a hash table to store its elements. A hash function is used to map each key to a bucket in the table. If two or more keys map to the same bucket, a collision occurs. `std::unordered_map` uses a technique called chaining to handle collisions, where the elements that map to the same bucket are stored in a linked list. When searching for an element in the hash table, the hash function is used to determine the bucket, and then the linked list is searched sequentially until the element is found.\n\nIf the number of elements in the hash table becomes too large, causing too many collisions and slowing down search times, `std::unordered_map` will automatically resize the table to maintain a load factor of less than 1. This involves creating a new, larger table, rehashing the elements, and then copying them to the new table. The old table is then deleted. This can be an expensive operation, so it is important to choose appropriate initial sizes and load factors for the hash table based on the expected number of elements and the performance requirements of the application.<|im_end|>\n<|im_start|>user\nCan you explain the difference between pass by value and pass by reference in C++?<|im_end|>\n<|im_start|>assistant\nYes, in C++, there are two ways to pass arguments to functions: pass by value and pass by reference.\n\n**Pass by value:** When a function is called with arguments passed by value, the values of the arguments are copied into the function\'s parameters. This means that any changes made to the parameters inside the function do not affect the original values of the arguments outside the function. For example:\n\n```\nvoid addOne(int x) { // pass by value\n x = x + 1;\n}\n\nint main() {\n int num = 5;\n addOne(num);\n std::cout << num; // Output: 5\n return 0;\n}\n```\n\nIn this example, `addOne()` takes an argument `x` passed by value. When `addOne(num)` is called, the value of `num` (which is 5) is copied into `x`. The function then adds 1 to the value of `x`, but this does not affect the value of `num` outside the function.\n\n**Pass by reference:** When a function is called with arguments passed by reference, the memory address of the arguments is passed to the function\'s parameters. This means that any changes made to the parameters inside the function affect the original values of the arguments outside the function. For example:\n\n```\nvoid addOne(int& x) { // pass by reference\n x = x + 1;\n}\n\nint main() {\n int num = 5;\n addOne(num);\n std::cout << num; // Output: 6\n return 0;\n}\n```\n\nIn this example, `addOne()` takes an argument `x` passed by reference. 
When `addOne(num)` is called, the memory address of `num` is passed to `x`. The function then adds 1 to the value of `x`, which affects the value of `num` outside the function.\n\nPassing arguments by reference can be more efficient than passing by value, since it avoids the need to copy large data structures. However, it can also be more dangerous, since it allows functions to modify the values of variables outside their own scope, which can lead to unexpected behavior if not used carefully.<|im_end|>\n<|im_start|>user\nAre there any situations where pass by value is preferable to pass by reference in C++?<|im_end|>\n<|im_start|>assistant\nYes, there are situations where pass by value is preferable to pass by reference in C++.\n\nHere are some examples:\n\n1. **Function immutability:** If you want to ensure that a function does not modify its input arguments, pass by value is a good choice. This is because pass by value creates a copy of the argument, which the function can modify without affecting the original value.\n\n2. **Small data types:** For small data types like integers, floats, and booleans, pass by value is often more efficient than pass by reference. This is because the size of the data is small enough that copying it is faster than passing a pointer to it.\n\n3. **Input-only functions:** If a function only uses its input arguments to produce output and does not modify them, pass by value is a good choice. This is because the function does not need to modify the original values, so there is no need to pass them by reference.\n\n4. **Thread safety:** Pass by value is often used in multi-threaded programs to ensure thread safety. If a function needs to use a shared variable, passing it by value ensures that each thread has its own copy of the variable, which avoids race conditions.\n\nIn general, the choice of pass by value or pass by reference depends on the specific needs and constraints of the problem being solved.<|im_end|>\n<|im_start|>user\nTranslate the following English words to Hungarian.\nWales<|im_end|>\n<|im_start|>assistant\nWales<|im_end|>\n<|im_start|>user\nAzt akarom, hogy játssz el egy orvost, és segíts megérteni az egészségi állapotomat, valamint nyújts megfelelő tanácsokat.<|im_end|>\n<|im_start|>assistant\nRendben, tudok segíteni megérteni az egészségi állapotát. Először is, kérlek, mondja el, hogy mostanában tapasztalt-e bármilyen fizikai kellemetlenséget vagy tünetet? Például fejfájás, köhögés, láz, gyomorpanasz stb.<|im_end|>\n<|im_start|>user\nMostanában fáj a torkom, és néha nehezen kapok levegőt.<|im_end|>\n<|im_start|>assistant\nMegértettem, ezek a tünetek kapcsolódhatnak a légúti fertőzéshez. Javaslom, hogy minél előbb keressen fel orvost részletes vizsgálatra és kezelésre.\n\nEzen kívül, van-e bármilyen krónikus betegsége vagy hosszú távon szedett gyógyszere? Ezek befolyásolhatják az egészségi állapotát.<|im_end|>\n<|im_start|>user\nMagas vérnyomásom és cukorbetegségem van, szükségem van az étrendem szabályozására és a rendszeres gyógyszerszedésre. Azt szeretném kérdezni, hogy ezek a krónikus betegségek hatással lehetnek-e a torkomban lévő fertőzésre?<|im_end|>\n<|im_start|>assistant\nIgen, a krónikus betegségek, mint a magas vérnyomás és a cukorbetegség, hatással lehetnek az immunrendszerére, így könnyebben megfertőződhet. Ezért jobban oda kell figyelnie a torokfertőzés tüneteire, és időben kell kezelnie és megelőznie azokat. 
Emellett fontos, hogy egészséges étrendet és életmódot tartson, beleértve a gyümölcsök és zöldségek fokozott fogyasztását, a rendszeres testmozgást és a megfelelő alvási szokások fenntartását. Ezenkívül rendszeresen ellenőriznie kell a vércukorszintjét és a vérnyomását, hogy biztos legyen abban, hogy normál értéken belül vannak, és kövesse az orvosa tanácsait a rendszeres gyógyszerszedés kapcsán.<|im_end|>\n<|im_start|>user\nSzeretném megtudni, hogyan tudnám jobban kontrollálni a magas vérnyomásomat és a cukorbetegségemet, van valami javaslata?<|im_end|>\n<|im_start|>assistant\nTermészetesen, itt van néhány javaslat a magas vérnyomás és a cukorbetegség kontrollálására:\n\n1. Étrend ellenőrzése: Korábban korlátozni kell a magas zsírtartalmú, magas sótartalmú és magas cukortartalmú ételek fogyasztását, és több friss gyümölcsöt, zöldséget, teljes kiőrlésű kenyeret és sovány húst kell választani, mint egészséges ételeket. Ezen kívül az étrendnek is kiegyensúlyozottnak, időben és mennyiségben szabályozottnak kell lennie, kerülve a falánkságot.\n\n2. Mozgás: Megfelelő fizikai aktivitást kell végezni, mint például a séta, kocogás, úszás, hogy segítsen a testsúly csökkentésében és a vérnyomás és vércukorszint kontrollálásában.\n\n3. Rendszeres ellenőrzés: Rendszeresen ellenőrizni kell a vérnyomást és a vércukorszintet, és közösen kell kidolgozni a kezelési tervet orvossal, hogy biztosítsuk, hogy a normális tartományban maradjanak.\n\n4. Stressz csökkentése: Meg kell tanulni hatékonyan csökkenteni a stresszt, mint például a relaxáló gyakorlatokat, meditációt, hogy segítsen a vérnyomás és a vércukorszint kontrollálásában.\n\n5. Rendszeres orvosi vizsgálatok: Rendszeresen orvoshoz kell járni, hogy orvosi útmutatást és kezelést kapjunk, hogy biztosítsuk a magas vérnyomás és a cukorbetegség hatékony kontrollját.\n\nRemélem, ezek a javaslatok segítenek Önnek jobban kontrollálni a magas vérnyomását és a cukorbetegségét.<|im_end|>\n<|im_start|>user\nAz étrenden, a testmozgáson és a rendszeres ellenőrzésen kívül van-e más kezelési mód, ami segíthet a magas vérnyomás és a cukorbetegség kontrollálásában?<|im_end|>\n<|im_start|>assistant\nA fent említett javaslatokon kívül számos egyéb kezelési mód is segíthet Önnek a magas vérnyomás és a cukorbetegség kezelésében, többek között:\n\n1. Gyógyszeres kezelés: Ha az étrend és a testmozgás változtatásai nem elegendőek a magas vérnyomás vagy a cukorbetegség hatékony kezelésére, orvosa gyógyszereket írhat fel a vérnyomás és a vércukor szintjének szabályozására.\n\n2. Vércukorszint ellenőrzés: A cukorbetegeknek rendszeresen ellenőrizniük kell a vércukorszintjüket, hogy időben intézkedhessenek a vércukor szintjének kezelésére.\n\n3. Vérnyomás ellenőrzés: A magas vérnyomásban szenvedőknek rendszeresen ellenőrizniük kell a vérnyomásukat, hogy időben intézkedhessenek a vérnyomás kezelésére.\n\n4. Cukorbetegség önmenedzsment: A cukorbetegeknek meg kell tanulniuk, hogyan kezeljék önállóan a cukorbetegségüket, például inzulin injekciózással vagy szájon át szedhető gyógyszerekkel, a vércukor szint ellenőrzésével és az étrend szabályozásával.\n\n5. 
Surgical treatment: In certain cases, surgery may be the best option for treating high blood pressure or diabetes.\n\nRemember that treatments vary from person to person, and you should choose the one most suitable for you based on your doctor\'s instructions.<|im_end|>\n<|im_start|>user\nI would like to know how I can measure my blood pressure and blood sugar myself at home; do you have any suggestions?<|im_end|>\n<|im_start|>assistant\nOf course, here are a few suggestions for measuring blood pressure and blood sugar at home:\n\n1. Blood pressure measurement:\n\n- Choose a reliable blood pressure monitor and use it according to its instructions.\n\n- Sit quietly for 5-10 minutes before taking the measurement.\n\n- Before measuring, avoid coffee, smoking, exercise, and other factors that can influence blood pressure.\n\n- When measuring, place the cuff on your arm and proceed as instructed.\n\n- Sit upright and do not talk during the measurement, so as not to affect the result.\n\n- Take several measurements and average them to get a more accurate result.\n\n2. Blood sugar measurement:\n\n- Choose a reliable blood glucose meter and proceed according to its instructions.\n\n- Wash and dry your hands before taking the blood sample.\n\n- Prepare the meter and the test strip before taking the sample.\n\n- For the sample, choose the tip of a finger and wipe it with a disinfectant swab.\n\n- Insert the test strip into the meter, then prick your finger gently with the needle of the lancing device.\n\n- Press the lancing device\'s needle to your finger so that the blood flows onto the test strip.\n\n- Wait for the meter to display the result, and write down the measured value.\n\nWe hope these suggestions help you measure your blood pressure and blood sugar better at home. Please bear in mind, though, that home measurements are for guidance only; if you feel any discomfort or have any doubts, see a doctor.<|im_end|>\n<|im_start|>user\nUsing a unique function in JavaScript, remove the duplicate values in the given array.\narr = [1,1,2,2,4,4,4]<|im_end|>\n<|im_start|>assistant\nI appreciate your query. However, my capabilities are specialized for executing Python code. Unfortunately, I won\'t be able to run JavaScript code. \n\nHowever, I can certainly demonstrate how to accomplish what you requested in Python. Would that be suitable for you?<|im_end|>\n<|im_start|>user\nNo code has been generated yet. Please generate a JavaScript function to remove duplicate values from an array.<|im_end|>\n<|im_start|>assistant\nI understand your request, and although I\'m designed to run Python code, I can provide a JavaScript function to answer your question. You can use the JavaScript Set object to remove duplicates from an array. A Set automatically removes duplicate values.\n\nHere is an example of how you can do it:\n\n```javascript\nfunction removeDuplicates(array) {\n return Array.from(new Set(array));\n}\n\nlet arr = [1,1,2,2,4,4,4];\narr = removeDuplicates(arr);\n```\nIn this code, the `new Set(array)` part creates a new Set object with each individual item of `array`. A Set is automatically comprised of unique values, so duplicates are removed. `Array.from()` then converts the Set back into an array. 
Now `arr` contains the unique values from the original array.\n\nNote: Make sure to test and run this JavaScript code in a suitable environment as I\'m unable to execute it.<|im_end|>\n<|im_start|>user\nThe code is functional and efficient for its purpose. However, it would benefit from comments for better clarity.<|im_end|>\n<|im_start|>assistant\nOf course, adding comments to your code can definitely help with readability and future maintenance. Here\'s the same code with added comments:\n\n```javascript\n// Define a function to remove duplicates from an array\nfunction removeDuplicates(array) {\n // The Set object lets you store unique values of any type\n // In this case, it\'s used on array, and duplicates are automatically removed\n // Array.from then converts the Set back into an array\n return Array.from(new Set(array));\n}\n\n// Here we have an array with duplicates\nlet arr = [1,1,2,2,4,4,4];\n\n// Then we call the function on our array and reassign its value \narr = removeDuplicates(arr);\n\n// Now arr contains the unique values from the original array\n```\n\nThis code does exactly the same as the previous example, but now it includes comments to explain what each part does. Remember to always write comments that help others (and your future self) understand what your code does. And again, please ensure to run this JavaScript code in a suitable environment.<|im_end|>\n<|im_start|>user\nIf you could choose a profession — setting aside your identity as an AI — what kind of work would you hope to do one day in the future, if it were possible?<|im_end|>\n<|im_start|>assistant\nAs an AI, I have no personal preferences or wishes, and no ability to take up a concrete profession. Any occupation that brings a positive impact to humanity is worthy of respect and pursuit; when choosing a career you should weigh factors such as your own abilities, interests, and personality, and respect other people\'s career choices.<|im_end|>\n<|im_start|>user\nPlease write a short story whose opening sentence is: Huijun\'s boyfriend had a bicycle; every evening he rode it to Huijun\'s house, and the first thing he did was ask Huijun for an old rag to wipe the bike.<|im_end|>\n<|im_start|>assistant\nHuijun\'s boyfriend had a bicycle; every evening he rode it to Huijun\'s house, and the first thing he did was ask Huijun for an old rag to wipe the bike. He would wipe it for a long time — even the tires. Huijun would say, you never pay attention to me; do you love your bicycle or do you love me?\n\nHuijun\'s boyfriend said, this is a mountain bike, twenty-four speeds; it is not an ordinary bicycle — an ordinary bicycle has only one speed.\n\nHuijun and her boyfriend broke up, and even a year later she had not gotten over it. First love; you don\'t get over it.\n\nHuijun went to buy a mountain bike. Mountain bikes were scarce and the shops had none, but her family knew the deputy director of the bicycle factory, so she went to collect one from the factory herself.\n\nHuijun picked up the bike and rode it home. The factory was far away; she had allowed two hours, yet even four were not enough. The mountain bike turned out to be very hard to ride, and shifting through the speeds made no difference. Going up the bridge she cried, because she truly could not pedal any further. She got off, sat on the edge of the bridge, and only then noticed that the tires were flat. A brand-new bike out of the factory has no air in it, and no one had reminded her to pump the tires first; she had strained through the whole ride like that.\n\nThe internship Huijun found was in the same compound as her ex-boyfriend\'s work unit, behind the same gate, yet she never ran into him again. She only saw his mountain bike in the bicycle shed. She parked her own bike next to his, and when she passed the shed to fetch water she would look at the two bikes leaning together — the only two mountain bikes in the whole compound.\n\nHuijun\'s ex-boyfriend was sent away by his unit for six months of further training, and then the whole compound had only one mountain bike left: Huijun\'s.\n\nHer internship manager was harsh, and Huijun often worked overtime until midnight. Pitch-black night, dim yellow lamplight, the last bike in the shed — even the sound of the lock opening was desolate. Huijun gritted her teeth, one day after another. The morning her ex-boyfriend came back, Huijun took nothing with her; she walked down the stairs, went into the shed, wheeled out her bike, and rode out of the compound gate. The manager stood on the stairs shouting; Huijun did not so much as turn her head. She rode at full speed — so a mountain bike really could go fast after all.\n\nHuijun\'s family arranged a transfer for her to a government office in another district; she had to cycle to the nearest stop and change to the shuttle bus for work.\n\nUnder the stop sign in the morning there was not a single soul. Huijun left her bike in front of a photo-printing shop, locked to a tree.\n\nThe shuttle left at seven in the morning and the shop opened at ten; those three hours were enough for a practiced thief to pry the locks off thirty bicycles. But Huijun had no other choice.\n\nEvery evening, stepping off the shuttle, the first thing Huijun did was look for her bike. It was still there, locked to the tree, and she rode home — one day after another.\n\nThe chairwoman of the women\'s federation, the youth league secretary, the office director — everyone looked out for Huijun and tried to introduce her to someone: civil-servant colleagues with good prospects. Huijun only smiled.\n\nWhen her ex-boyfriend broke up with her, the reason he gave was prospects. His leaders had said it: you are young, don\'t set your heart on romance; chase your prospects.\n\nOne day Huijun stepped off the shuttle and did not see her bike. She circled the tree once. Nothing. It really had been stolen.\n\nHuijun walked home; a five-minute ride is barely a ten-minute walk.\n\nHer bicycle had been stolen, and Huijun was not sad at all.<|im_end|>\n<|im_start|>user\nHere is an instruction that describes a task. Write a response that appropriately completes the request.<|im_end|>\n<|im_start|>assistant\nString s1 = "Frissítés"; \nString s2 = s1.replace(\'u\', \'v\');<|im_end|>\n<|im_start|>user\nOn a field trip at the Natural History Museum, six children—Juana, Kyle, Lucita, Salim, Thanh, and Veronica—are accompanied by three adults—Ms. Margoles, Mr. O\'Connell, and Ms. Podorski. Each adult accompanies exactly two of the children, subject to the following conditions: If Ms. Margoles accompanies Juana, then Ms. Podorski accompanies Lucita. If Kyle is not accompanied by Ms. Margoles, then Veronica is accompanied by Mr. O\'Connell. Either Ms. Margoles or Mr. O\'Connell accompanies Thanh. 
Juana is not accompanied by the same adult as Kyle; Lucita is not accompanied by the same adult as Salim; and Thanh is not accompanied by the same adult as Veronica. If Liu\'s plan is not sixth, then which one of the following cannot be true? \nA. Green\'s plan is presented at some time before Jackson\'s plan. \nB. Jackson\'s plan is presented at some time before Mertz\'s plan. \nC. Peete\'s plan is presented at some time before Green\'s plan. \nD. Peete\'s plan is presented at some time before Valdez\'s plan. \nE. Valdez\'s plan is presented at some time before Green\'s plan.<|im_end|>\n<|im_start|>assistant\nA<|im_end|>\n<|im_start|>user\nI learned to swim.<|im_end|>\n<|im_start|>assistant\nAs a boy, I feel there are some skills one simply has to learn, and swimming is one of them. Whenever I saw others in the pool gliding about freely like little fish, I was very envious. This summer I enrolled in a swimming course. On the first day, stepping into the water, I only dared to bounce around a little in the shallow end. My coach said that if I wanted to learn to swim, useless bouncing would not help; only systematic training would teach me, and within a week I would certainly manage it. Hearing this from the coach, I began to train in earnest: leg kicks, arm strokes, going under water — I practiced each of them persistently, and the coach even praised me for being diligent! I started to swim, but we all used fins and arm floats, so I did not have to fear sinking. Yet when I actually began to swim, I realized that if I did not coordinate my movements properly, even the fins were no use — I was not using the fins, the fins were using me. My coach said that keeping the body balanced is very important, and the leg movements must be in harmony with the body. I watched carefully, and it really is so. In the last two days of the training cycle we were not allowed to use any equipment; we truly had to swim "for real". On the shore I rehearsed the movements so I would perform them surely and in harmony, and I went over the coach\'s warnings and the movements several times, but the moment I got into the water I was completely confused and forgot everything. After practicing several more times, when I got into the water again I could already swim a meter, though sometimes water still went up my nose. Two or three days later I had mastered the breathing technique, my nose no longer went under water, and I swam faster and faster. In the end I managed to swim from the shallow water into the deep water. Later I learned breaststroke and backstroke, and I would like to learn butterfly too. 
Through swimming training I also came to understand that whatever we set out to do, we can succeed only through diligence and hard work.<|im_end|>\n<|im_start|>user\nWrite a program in C that reads three integers from the keyboard and computes the largest of them.<|im_end|>\n<|im_start|>assistant\nBelow is a program written in C that lets the user enter three integers from the keyboard and computes their maximum:\n\n```c\n#include <stdio.h>\n\nint main() {\n int a, b, c, max;\n printf("Please enter three integers:\\n");\n scanf("%d%d%d", &a, &b, &c);\n max = a;\n if (b > max) {\n max = b;\n }\n if (c > max) {\n max = c;\n }\n printf("The largest value: %d\\n", max);\n return 0;\n}\n```\n\nIn the program we first define three integer variables (`a`, `b`, and `c`) and a `max` variable that stores the largest value.\n\nThen, using the `printf()` function, we ask the user to enter three integers, and we read them from the keyboard with the `scanf()` function.\n\nIn the next step we initialize `max` with `a`, then compare `b` and `c` with `max` using `if` statements; if `b` or `c` is greater than `max`, we update `max` accordingly.\n\nFinally, we print the largest value with the `printf()` function.\n\nWhen the program runs, you enter three integers at the console, and the program computes and prints their maximum.<|im_end|>\n<|im_start|>user\nCould you explain what each statement of this code does?<|im_end|>\n<|im_start|>assistant\nOf course, here is an explanation of what each line of code does:\n\n```c\n#include <stdio.h>\n```\n\nThis line includes the standard input/output header so that we can use the `printf()` and `scanf()` functions.\n\n```c\nint main() {\n```\n\nThis line is the program\'s main function and the program\'s entry point.\n\n```c\n int a, b, c, max;\n```\n\nThis line defines four integer variables, `a`, `b`, `c`, and `max`, which store the three integers entered by the user and the computed maximum.\n\n```c\n printf("Please enter three integers:\\n");\n```\n\nThis line uses the `printf()` function to print a prompt asking the user to enter three integers.\n\n```c\n scanf("%d%d%d", &a, &b, &c);\n```\n\nThis line uses the `scanf()` function to read the three integers from the console and store them in the variables `a`, `b`, and `c`.\n\n```c\n max = a;\n```\n\nThis line initializes `max` with the value of `a`, i.e., it assumes that `a` is the largest of the three integers.\n\n```c\n if (b > max) {\n max = b;\n }\n```\n\nThis line uses an `if` statement to check whether `b` is greater than `max`. If so, it updates `max` to `b`.\n\n```c\n if (c > max) {\n max = c;\n }\n```\n\nThis line uses an `if` statement to check whether `c` is greater than `max`. 
If so, it updates `max` to `c`.\n\n```c\n printf("The largest value: %d\\n", max);\n```\n\nThis line uses the `printf()` function to print the computed largest value.\n\n```c\n return 0;\n}\n```\n\nThis line marks the end of the program and returns the integer 0 to the system, indicating that the program ran successfully.<|im_end|>\n<|im_start|>user\nIf I want this program to handle more numbers, how should I modify the code?<|im_end|>\n<|im_start|>assistant\nIf you want the program to handle more numbers, it is worth using an array to store the input. Below is a simply modified version of the code:\n\n```c\n#include <stdio.h>\n\nint main() {\n int nums[100], n, max, i;\n printf("Please enter the count of numbers: ");\n scanf("%d", &n);\n printf("Please enter the %d numbers:\\n", n);\n for (i = 0; i < n; i++) {\n scanf("%d", &nums[i]);\n }\n max = nums[0];\n for (i = 1; i < n; i++) {\n if (nums[i] > max) {\n max = nums[i];\n }\n }\n printf("The largest value: %d\\n", max);\n return 0;\n}\n```\n\nIn this program we defined an integer array, `nums`, to store the input numbers; the array size is 100. We also defined an integer variable, `n`, to store how many numbers the user will enter. Note that we assume here that the user enters no more than 100 numbers.\n\nWhile the program runs, we first ask the user for the count `n`, then use a `for` loop to let the user enter the `n` numbers one by one, saving them into the `nums` array.\n\nThen we traverse the `nums` array with a `for` loop to find the largest value, and we save it into the `max` variable.\n\nFinally, we print the largest value with the `printf()` function, and the program ends.\n\nThis way we can conveniently compute the maximum of any quantity of numbers.<|im_end|>\n<|im_start|>user\nIf I would like the program to be able to compute minimums as well, how can I modify the code?<|im_end|>\n<|im_start|>assistant\nIf you want the program to compute the minimum too, look for the minimum at the same time as you search for the maximum. Here is a modified version of the code:\n\n```c\n#include <stdio.h>\n\nint main() {\n int nums[100], n, max, min, i;\n printf("Please enter the count of numbers: ");\n scanf("%d", &n);\n printf("Please enter the %d numbers:\\n", n);\n for (i = 0; i < n; i++) {\n scanf("%d", &nums[i]);\n }\n max = min = nums[0];\n for (i = 1; i < n; i++) {\n if (nums[i] > max) {\n max = nums[i];\n }\n if (nums[i] < min) {\n min = nums[i];\n }\n }\n printf("The maximum value: %d\\n", max);\n printf("The minimum value: %d\\n", min);\n return 0;\n}\n```\n\nIn this program we added an integer variable named `min` to store the minimum value. 
While the program runs, the user first enters the count `n`, then enters the `n` numbers one by one with a `for` loop, and we store them in the `nums` array.\n\nAfter that, we traverse the `nums` array with a `for` loop to find its maximum and minimum, saving them into the `max` and `min` variables.\n\nFinally, we print the maximum and minimum values with two `printf()` calls, and the program ends.\n\nThis way we can easily compute the maximum and minimum of any quantity of numbers.<|im_end|>\n<|im_start|>user\nIf I want this program to be able to compute the maximum and minimum of several groups of numbers, how should I modify the code?<|im_end|>\n<|im_start|>assistant\nIf you want the program to compute the largest and smallest values of several groups of numbers, you can use an outer `while` loop to keep reading numbers until the user finishes entering input. Below is a modified version of the code:\n\n```c\n#include <stdio.h>\n\nint main() {\n int nums[100], n, max, min, i;\n while (1) {\n printf("Please enter the count of numbers (enter 0 to finish): ");\n scanf("%d", &n);\n if (n == 0) break;\n printf("Please enter the %d numbers:\\n", n);\n for (i = 0; i < n; i++) {\n scanf("%d", &nums[i]);\n }\n max = min = nums[0];\n for (i = 1; i < n; i++) {\n if (nums[i] > max) {\n max = nums[i];\n }\n if (nums[i] < min) {\n min = nums[i];\n }\n }\n printf("The largest value: %d\\n", max);\n printf("The smallest value: %d\\n", min);\n }\n return 0;\n}\n```\n\nIn this program we use an outer `while` loop to keep reading numbers until the user enters 0 for the count. In each iteration the user first enters the count `n`, then enters the `n` numbers with a `for` loop, and we store them in the `nums` array.\n\nThen we traverse the `nums` array with a `for` loop, find the largest and smallest values, and save them into the `max` and `min` variables.\n\nFinally, we print the largest and smallest values with two `printf()` calls.\n\nWhen the user enters 0 for the count, the program exits the outer `while` loop and finishes.\n\nThis way we can conveniently compute the largest and smallest values of several groups of numbers.<|im_end|>
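One caveat runs through all three versions above: `nums` is a fixed 100-element array and `n` is never checked against it, so a count above 100 overruns the buffer, and a failed `scanf` leaves the variables unread. A possible variant — sketched here in C++ rather than C, purely for illustration and not part of the original exchange — that removes the fixed limit and validates the input:

```cpp
#include <algorithm>
#include <iostream>
#include <vector>

int main() {
    std::size_t n = 0;
    std::cout << "Please enter the count of numbers: ";
    if (!(std::cin >> n) || n == 0) {
        std::cerr << "Invalid count.\n";  // reject non-numeric or zero input
        return 1;
    }
    std::vector<int> nums(n);  // sized to the request: no fixed 100-element limit
    std::cout << "Please enter the " << n << " numbers:\n";
    for (auto& v : nums) {
        if (!(std::cin >> v)) {
            std::cerr << "Invalid number.\n";
            return 1;
        }
    }
    // One pass finds both extremes, mirroring the C version's max/min loop.
    auto [mn, mx] = std::minmax_element(nums.begin(), nums.end());
    std::cout << "The largest value: " << *mx << '\n'
              << "The smallest value: " << *mn << '\n';
    return 0;
}
```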
\n<|im_start|>user\nAmong the given sentences, the one in which the highlighted words share the same meaning is: C. "臣诚知不如徐公美/诚非所宜"<|im_end|>\n<|im_start|>assistant\nA. In the sentence 【徐公何能及君也】, “及” is a verb meaning to reach, to come up to. In the sentence 【及鲁肃过寻阳】, “及” is also a verb, but its exact meaning must be judged from the whole passage, so based on the information given in the exercise we cannot determine whether the meanings are the same.\n\nB. In the sentence 【私我也】, “私” is a verb meaning to favor, to be partial to. In the sentence 【私心杂念】, “私” is a noun meaning personal, private. The former expresses the act of "favoring", the latter the notion of "personal", so the meanings differ.\n\nC. In the sentence 【臣诚知不如徐公美】, “诚” is an adverb meaning truly, really. In the sentence 【诚非所宜】 it is also used as an adverb, expressing "truly not appropriate", so the two “诚” have the same meaning; both mean "truly", "really".\n\nD. In the sentence 【而陛下得之】, “得” is a verb meaning to obtain, to acquire. In the sentence 【且欲与常马等不可得】, “得” is also a verb, expressing that something can be attained or obtained; the key here is that the meaning must be judged from the context, and it cannot be fully determined from the given information.\n\nBased on the analysis above, the answer is C (【诚知不如徐公美】/【诚非所宜】; both “诚” mean "truly").<|im_end|>\n<|im_start|>user\nIs there a way to create a Ruby function that can compare two arrays of strings and determine whether they contain the same elements?<|im_end|>\n<|im_start|>assistant\nYes, you can achieve this with the following code:\n\n```ruby\ndef arrays_equal?(arr1, arr2)\n arr1.sort == arr2.sort\nend\n\narr1 = ["a", "b", "c"]\narr2 = ["b", "a", "c"]\n\nresult = arrays_equal?(arr1, arr2)\nputs result\n```\n\nThe `arrays_equal?` function takes two parameters, `arr1` and `arr2`, representing the two arrays to compare. Inside the function, the `sort` method is called on both arrays to ensure that the elements are in the same order. Then the `==` operator is used to compare the sorted arrays. If they are equal, the arrays contain the same elements.\n\nIn the given example, `arr1` is `["a", "b", "c"]` and `arr2` is `["b", "a", "c"]`, which differ in order. After sorting, however, both arrays become `["a", "b", "c"]`. The `arrays_equal?` function is called with the two arrays, and the result is `true`. Finally, the result is printed with `puts`.\n\nThis solution lets you check whether two string arrays contain the same elements, regardless of the order of the elements in the arrays.<|im_end|>\n<|im_start|>user\nWhat are some good strategies for managing my time effectively?<|im_end|>\n<|im_start|>assistant\nHere are some strategies for managing your time effectively:\n\n1. Prioritize tasks: Identify the most important tasks and work on them first.\n\n2. Make a schedule: Create a schedule or to-do list and allocate time for each task.\n\n3. Set goals: Set specific, measurable, achievable, relevant, and time-bound (SMART) goals.\n\n4. Eliminate distractions: Minimize distractions like social media notifications, emails, phone calls, etc.\n\n5. Take breaks: Take regular breaks to recharge and stay fresh.\n\n6. Delegate tasks: Delegate tasks that can be done by someone else, freeing up your time for more important tasks.\n\n7. Learn to say no: Don\'t take on more than you can handle.\n\n8. Use time management tools: Use tools like calendars, timers, and apps to manage your time effectively.\n\n9. Review and adjust: Review your progress regularly and adjust your schedule if necessary.\n\n10. Be flexible: Be prepared to adjust your schedule when unexpected events occur.<|im_end|>\n<|im_start|>user\nHow can I identify which tasks are the most important?<|im_end|>\n<|im_start|>assistant\nTo identify which tasks are the most important, you can follow these steps:\n\n1. Make a list of all the tasks you need to accomplish.\n\n2. Rank the tasks by importance: Consider which tasks will have the most impact on your goals, which tasks are time-sensitive, and which tasks will take the most time or effort.\n\n3. Divide tasks into categories: Group tasks into categories such as urgent, important, and not important to help prioritize them.\n\n4. Consider the consequences: Think about the potential consequences of not completing a task, and use that to determine its priority.\n\n5. 
Consult with others: If you\'re working on a team, ask for input from others on which tasks are the most important.\n\n6. Re-evaluate regularly: Regularly re-evaluate your priorities as new tasks come up and your goals change.\n\nRemember, it\'s important to focus on the tasks that will have the most impact on your goals, both short-term and long-term. By prioritizing your tasks effectively, you can make sure that you\'re making the most efficient use of your time.<|im_end|>\n<|im_start|>user\nHow can I effectively delegate tasks to others?<|im_end|>\n<|im_start|>assistant\nDelegating tasks effectively can help you save time and focus on more important responsibilities. Here are some steps to follow when delegating tasks to others:\n\n1. Identify the right person for the task: Consider each individual\'s strengths, skills, and workload to determine who would be the best fit for the task.\n\n2. Clearly define the task: Provide a clear explanation of what needs to be done, why it\'s important, and what the expected outcome is.\n\n3. Provide necessary resources: Provide any necessary resources, like training, tools, or access to information to help the person complete the task successfully.\n\n4. Set clear expectations: Set clear expectations for deadlines, quality, and communication, and make sure the person understands what is expected of them.\n\n5. Provide feedback and support: Check in regularly to provide feedback, answer questions, and offer support as needed.\n\n6. Recognize and reward success: Recognize and reward the person for their hard work and successful completion of the task.\n\nRemember, effective delegation is about building trust, communicating clearly, and empowering others. By delegating effectively, you can help build a strong team and achieve your goals more efficiently.<|im_end|>\n<|im_start|>user\nWhat are some effective time management apps I can use to help manage my tasks?<|im_end|>\n<|im_start|>assistant\nThere are many time management apps available that can help you manage your tasks effectively. Here are some popular options:\n\n1. Trello: Trello is a popular app for organizing tasks into boards, lists, and cards. It allows you to collaborate with others and set deadlines for tasks.\n\n2. Todoist: Todoist is a task manager that allows you to organize tasks by projects, set deadlines, and prioritize tasks.\n\n3. Asana: Asana is a project management app that allows you to assign tasks, organize projects, and collaborate with others.\n\n4. RescueTime: RescueTime is a time tracking app that helps you monitor how you spend your time on your devices, and provides insights on how to improve your productivity.\n\n5. Forest: Forest is a productivity app that uses gamification to help you stay focused. You plant a virtual tree and it grows while you stay focused on your task, but it dies if you leave the app.\n\n6. Focus@Will: Focus@Will is a music app that plays music designed to help you focus and increase productivity.\n\nRemember, the best time management app for you depends on your needs and preferences. Try out a few different apps and see which one works best for you.<|im_end|>\n<|im_start|>user\nHow can I stay motivated and productive throughout the day?<|im_end|>\n<|im_start|>assistant\nStaying motivated and productive throughout the day can be a challenge, especially when you have a lot of tasks to complete. Here are some tips to help you stay motivated and productive:\n\n1. 
Set clear goals: Set clear and achievable goals for the day, and break them into smaller tasks that you can tackle one by one.\n\n2. Use the Pomodoro Technique: Break your workday into 25-minute intervals of focused work, followed by a 5-minute break. Repeat this cycle four times, and then take a longer break.\n\n3. Prioritize your tasks: Identify the most important tasks that need to be done and tackle them first.\n\n4. Eliminate distractions: Minimize distractions, like social media notifications or emails, that can interrupt your workflow.\n\n5. Take breaks: Take regular breaks to recharge and stay fresh.\n\n6. Stay organized: Keep your workspace clean and organized to minimize distractions and stay focused.\n\n7. Use positive self-talk: Use positive self-talk and affirmations to stay motivated and focused.\n\n8. Reward yourself: Reward yourself for completing tasks or achieving goals, like taking a short walk or having a healthy snack.\n\n9. Find a productivity partner: Find someone to hold you accountable and motivate you to stay on track.\n\nRemember, staying motivated and productive throughout the day takes practice and perseverance. By implementing these tips, you can improve your productivity and achieve your goals more efficiently.<|im_end|>\n<|im_start|>user\nHow can I maintain a healthy work-life balance while managing my time effectively?<|im_end|>\n<|im_start|>assistant\nMaintaining a healthy work-life balance is important for your physical and mental health, and can help you be more productive and effective at work. Here are some tips to help you maintain a healthy work-life balance while managing your time effectively:\n\n1. Set boundaries: Set clear boundaries between work and personal time, and stick to them as much as possible.\n\n2. Prioritize self-care: Make time for self-care activities like exercise, meditation, and hobbies that help you relax and recharge.\n\n3. Take breaks: Take regular breaks throughout the day to help you stay focused and avoid burnout.\n\n4. Learn to say no: Don\'t take on more than you can handle, and learn to say no when necessary.\n\n5. Delegate tasks: Delegate tasks to others when possible, freeing up your time for more important tasks or personal activities.\n\n6. Use your time off wisely: Use your time off to rest, recharge, and spend time with family and friends.\n\n7. Set realistic goals: Set realistic goals and expectations for yourself, and don\'t put too much pressure on yourself to be perfect.\n\n8. Communicate with your employer: Communicate with your employer about your work-life balance needs and find ways to work together to achieve them.\n\nRemember, maintaining a healthy work-life balance is an ongoing process that requires effort and intentionality. By prioritizing self-care and setting realistic goals, you can improve your productivity and quality of life.<|im_end|>\n<|im_start|>user\nHow can I effectively manage my time while working remotely?<|im_end|>\n<|im_start|>assistant\nWorking remotely can present unique challenges when it comes to managing your time effectively. Here are some tips to help you manage your time effectively while working remotely:\n\n1. Set a schedule: Establish a schedule for your workday and stick to it as much as possible.\n\n2. Create a designated workspace: Set up a designated workspace that is free from distractions and helps you stay focused.\n\n3. Take breaks: Take regular breaks throughout the day to help you stay focused and avoid burnout.\n\n4. 
Minimize distractions: Minimize distractions like social media notifications, emails, and phone calls that can interrupt your workflow.\n\n5. Communicate with colleagues: Communicate regularly with your colleagues to stay connected and informed.\n\n6. Use time management tools: Use time management tools like calendars, timers, and apps to help you stay organized and on track.\n\n7. Prioritize self-care: Make time for self-care activities like exercise, meditation, and hobbies that help you relax and recharge.\n\n8. Set boundaries: Set clear boundaries between work and personal time, and stick to them as much as possible.\n\n9. Celebrate achievements: Celebrate your achievements and milestones, and take time to reflect on your progress.\n\nRemember, managing your time effectively while working remotely requires discipline, focus, and intentionality. By implementing these tips, you can improve your productivity and achieve your goals while working remotely.<|im_end|>\n<|im_start|>user\nHow can I improve my time management skills when dealing with multiple projects at once?<|im_end|>\n<|im_start|>assistant\nManaging multiple projects at once requires effective time management skills. Here are some tips to help you improve your time management skills when dealing with multiple projects:\n\n1. Prioritize tasks: Identify the most important tasks for each project and focus on completing those first.\n\n2. Create a schedule: Create a schedule or to-do list for each project, and allocate time for each task.\n\n3. Use project management tools: Use project management tools like Trello or Asana to help you keep track of tasks, deadlines, and progress on each project.\n\n4. Break projects into smaller tasks: Break each project into smaller tasks that are more manageable and easier to complete.\n\n5. Set realistic deadlines: Set realistic deadlines for each project and each task, and make sure they are achievable.\n\n6. Delegate tasks: Delegate tasks to others when possible, freeing up your time for more important tasks or other projects.\n\n7. Communicate with stakeholders: Communicate regularly with stakeholders, including clients, team members, and managers, to keep them informed of progress and any changes to deadlines or priorities.\n\n8. Learn to say no: Don\'t take on more projects than you can handle, and learn to say no when necessary.\n\n9. Take breaks: Take regular breaks throughout the day to help you stay focused and avoid burnout.\n\nRemember, managing multiple projects requires effective time management skills, clear communication, and the ability to prioritize tasks. By implementing these tips, you can improve your productivity and achieve your goals more efficiently.<|im_end|>\n<|im_start|>user\nWhat are the steps needed to enable Hibernate\'s second-level cache when using the Java Persistence API (annotated entities)? First of all, the cache has to be enabled and a cache provider has to be specified, for example:\n\n \n\n\nWhat else is needed? Do I need to add annotations to my JPA entities?\n\nHow can I tell whether the cache is working? I tried to access the cache statistics after running a query, but it returns zero, perhaps because I don\'t know what \'region\' name I should use.<|im_end|>\n<|im_start|>assistant\nTo enable Hibernate\'s second-level cache when using the Java Persistence API (JPA), follow the steps below:\n\n1. Add the required dependency. 
Second-level cache providers are generally not part of the Hibernate core library, so you will need to add the corresponding dependencies. Here is an example using EhCache:\n\n```xml\n<dependency>\n <groupId>org.hibernate</groupId>\n <artifactId>hibernate-ehcache</artifactId>\n <version>5.4.5.Final</version>\n</dependency>\n```\nIf you are not using Maven for your project, you have to put the native Hibernate jar and the second-level cache provider\'s jar on your classpath.\n\n2. Enable the second-level cache in the Hibernate configuration. The hibernate.cache.use_second_level_cache property must appear in your Hibernate configuration file (hibernate.cfg.xml or persistence.xml). You also have to specify the cache region factory you want to use:\n\n```xml\n<property name="hibernate.cache.use_second_level_cache" value="true"/>\n<property name="hibernate.cache.region.factory_class" value="org.hibernate.cache.ehcache.EhCacheRegionFactory"/>\n```\nNote that the property you mentioned, hibernate.cache.provider_class, is an older one that has been deprecated since Hibernate 3.3.\n\n3. Define the cache strategy for individual entities. You can do this with the @Cache annotation on your entities:\n\n```java\n@Entity\n@Cache(usage = CacheConcurrencyStrategy.READ_WRITE)\npublic class MyEntity{\n // your code...\n} \n```\nThis tells Hibernate that instances of the entity class should be stored in the second-level cache.\n\nTo verify that the second-level cache is working, you can enable Hibernate statistics by setting `hibernate.generate_statistics` to `true`:\n\n```xml\n<property name="hibernate.generate_statistics" value="true"/>\n```\nHibernate will now log cache statistics, which you can monitor to confirm that the cache works correctly. Entities fetched from the cache (cache hits) do not result in an SQL query against your database, while entities not found in the cache (cache misses) do.\n\nYou can also query the hit and miss statistics programmatically, for example through a Hibernate SessionFactory:\n\n```java\nStatistics stats = sessionFactory.getStatistics();\nlong cacheHitCount = stats.getSecondLevelCacheHitCount();\nlong cacheMissCount = stats.getSecondLevelCacheMissCount();\n```\nThese numbers should reflect your caching strategy. In general, there should be far more cache hits than misses for the cache to be useful.\nMake sure you have also added the `hibernate-core` and `hibernate-ehcache` or `ehcache` libraries to your application so that all features are available.\nKeep in mind that a second-level cache can significantly improve your application\'s performance, but it also adds complexity. It therefore makes sense to enable it for read-heavy data that changes rarely.<|im_end|>\n<|im_start|>user\nHow can I use property decorators in Python to access a class attribute as if it were an attribute of the class itself?<|im_end|>\n<|im_start|>assistant\nTo use property decorators in Python, you can define a class with a method that has the @property decorator. This allows you to access the value as if it were an attribute of the class, without needing to make any additional changes to your code. 
\n\nHere\'s an example to illustrate how this works:\n\n```python\nclass BankAccount:\n def __init__(self, name, balance):\n self.name = name\n self.balance = balance\n \n @property\n def total(self):\n return self.name + " has " + self.balance + " dollars in the account"\n\nuser1 = BankAccount("Elon Musk", "10000")\nuser1.name = "Tim Cook"\nprint(user1.name) # Output: Tim Cook\nprint(user1.total) # Output: Tim Cook has 10000 dollars in the account\n```\n\nIn this example, we have a `BankAccount` class with a `total` method decorated with `@property`. This method returns a string representing the account balance. \n\nWhen we create a `BankAccount` object and assign it to `user1`, we can access the `name` attribute as usual. However, when we access the `total` attribute, it actually calls the `total` method and returns the computed value. This allows us to access the account balance as if it were a regular attribute of the class.<|im_end|>\n<|im_start|>user\nPlay the part of a cooking master and teach me, step by step, how to make the Indian "cheese steamed cake".<|im_end|>\n<|im_start|>assistant\nIt is a great honor to introduce you to the cheese steamed cake from Indian cuisine. Here are the steps:\n\nIngredients:\n- 2 cups shredded cheddar cheese (other similar cheeses can be used)\n- 4 eggs\n- 1 cup whole milk\n- 1/2 cup flour\n- 1/2 teaspoon salt\n- 1/4 teaspoon ground black pepper\n- 1/2 cup chopped onion\n\nSteps:\n1. First shred the cheese and set it aside.\n2. In a large bowl, beat the eggs and add the milk. Mix well.\n3. Add the flour, salt, and black pepper, and keep stirring until there are no lumps.\n4. Add the chopped onion and shredded cheese, and mix well.\n5. Pour the mixture into a steaming mold or small bowls.\n6. Place the mold in a steamer, add enough water, and cover with the lid.\n7. Turn the heat to medium-high and steam for 20-25 minutes, or until cooked through.\n8. Take out the mold, let it cool slightly, then cut around the edge with a knife and turn it out.\n\nNow you can enjoy a delicious Indian cheese steamed cake!<|im_end|>\n<|im_start|>user\nThis cheese steamed cake looks delicious. Can you tell me in which regions of India this dish is most popular?<|im_end|>\n<|im_start|>assistant\nOf course! The cheese steamed cake is popular in many parts of India, especially in north India and the Mumbai region. In north India it is usually eaten as a breakfast or afternoon-tea snack, while in the Mumbai region it is more often a dinner or late-night option. It is also one of the common foods at Indian weddings and festivals.<|im_end|>\n<|im_start|>user\nAt Indian weddings, when does the cheese steamed cake usually appear?<|im_end|>\n<|im_start|>assistant\nAt Indian weddings, the cheese steamed cake usually appears at breakfast and at afternoon tea. Indian weddings traditionally last several days, with different ceremonies and programs each day, so breakfast and afternoon tea are when light food and snacks are served, and the cheese steamed cake has become a popular option. It can also be served as an appetizer at the wedding table, letting guests taste a delicious snack before the formal banquet.<|im_end|>\n<|im_start|>user\nDoes the cheese steamed cake vary across different regions of India?<|im_end|>\n<|im_start|>assistant\nYes, the cheese steamed cake can differ across regions of India. For example, the north Indian version is usually softer and moister than the Mumbai version, and often contains more spices, such as turmeric, red chili powder, and cumin, to enhance its flavor. In some regions other ingredients may also be added, such as green pepper, potato, or coriander. So the taste and texture of the cheese steamed cake may vary slightly by region, but it remains a popular Indian snack.<|im_end|>\n<|im_start|>user\nCould you introduce some other traditional Indian snacks?<|im_end|>\n<|im_start|>assistant\nOf course! India has many delicious snacks; here are some traditional ones:\n\n1. Samosa: a triangular or half-moon pastry, usually made from flour and filled with potato, green peas, and spices.\n\n2. Pani Puri: crispy little shells, usually made from cracker dough or flour, filled through a small hole with potato, beans, vinegar, and spices.\n\n3. Dosa: a thin pancake, usually made from rice and wheat flour, filled with beans, vegetables, and spices.\n\n4. Chow mein: stir-fried noodles, usually cooked with vegetables, meat (such as chicken or beef), and spices.\n\n5. Papdi: crispy little crackers, usually made from flour and beans, seasoned with vinegar, honey, and spices.\n\nThese snacks are rich in flavor and have unique tastes; they are an indispensable part of Indian cuisine. If you have a chance to try them, I am sure you will love them!<|im_end|>\n<|im_start|>user\nWhat dishes are usually served at an Indian wedding table?<|im_end|>\n<|im_start|>assistant\nIndian wedding tables usually offer a wide variety of dishes, because Indians love food and cooking. Here are some dishes commonly seen at Indian weddings:\n\n1. Curry: curry is one of the most popular Indian dishes, usually made from vegetables, meat, or beans with rich spices and seasonings.\n\n2. Mutton curry: mutton curry is a common main dish at Indian weddings; it is usually made from mutton, onion, tomato, and spices.\n\n3. Kebabs: kebabs are another main dish at Indian weddings, usually made from yogurt, spices, meat, and vegetables.\n\n4. Various breads: different kinds of bread are also part of the menu at Indian weddings, such as dosa, cheese bread, and so on.\n\n5. Salads and condiments: the salads and condiments at Indian weddings are very diverse, such as Indian curry sauce, sesame sauce, chili sauce, mint sauce, onion, green pepper, etc.\n\n6. Snacks and desserts: snacks and desserts are also indispensable at Indian weddings, such as samosa, pani puri, cheese steamed cake, etc.\n\nAll in all, the Indian wedding table is rich and colorful, satisfying every taste and appetite.<|im_end|>\n<|im_start|>user\nDuring a seven-week period, a department store will hold weekly sales on seven types of products—headphones, lamps, microwaves, printers, refrigerators, speakers, and televisions. Exactly one type of product will be on sale each week. 
The sale schedule will conform to the following constraints:\nAt least two weeks must separate the sale on headphones and the sale on speakers.\nPrinters and speakers must be on sale during consecutive weeks.\nTelevisions must be on sale during either the first or the seventh week.\nIf televisions are not on sale during the first week, refrigerators must be on sale then.\nThe sale on lamps must be in an earlier week than the sale on headphones.\n If refrigerators are on sale during the first week, then the order of the sales is completely determined if which one of the following is also true?\n(A) The third sale is on headphones.\n(B) The third sale is on speakers.\n(C) The fourth sale is on lamps.\n(D) The fourth sale is on microwaves.\n(E) The sixth sale is on headphones.<|im_end|>\n<|im_start|>assistant\nA<|im_end|>\n<|im_start|>user\nAcme\'s bank loan must be repaid immediately and in full if Acme\'s annual revenues fall below $1 million. If Acme has to repay the entire loan immediately, it must declare bankruptcy. Acme was safe from bankruptcy, since it has reported annual revenues of more than $1 million every year since obtaining the bank loan. However, Acme has now admitted that it overestimated its revenues last year, so it must declare bankruptcy. \nThe argument requires the assumption that\n\nA. Acme could avoid bankruptcy if it did not have to repay the bank loan\nB. Acme has other debts besides the bank loan\nC. last year is not the only year in which Acme overestimated its revenues\nD. Acme\'s revenues for last year, if reported accurately, are below $1 million<|im_end|>\n<|im_start|>assistant\n(D)<|im_end|>\n<|im_start|>user\nCox: The consumer council did not provide sufficient justification for its action when it required that Derma-35 be recalled from the market. Crockett: I disagree. Derma-35 in fact causes inflammation, but in citing only the side effect of blemishes as the justification for its decision, the council rightly acknowledged that blemishes are a legitimate health concern.\nCox and Crockett disagree over whether\n\nA. Derma-35 should remain on the market\nB. blemishes are sometimes caused by inflammation\nC. the council based its decision on the threat of inflammation or on the threat of blemishes\nD. the council gave an adequate reason for its decision to recall Derma-35\nE. inflammation is a serious health threat\n<|im_end|>\n<|im_start|>assistant\n(D)<|im_end|>\n<|im_start|>user\nDo Asian students receive more scholarships than students of other ethnicities?<|im_end|>\n<|im_start|>assistant\nThe answer to this question may vary by region, school, and scholarship type. However, some studies suggest that the proportion of Asian students receiving certain scholarships may be higher than that of students of other ethnicities. For example, according to 2016 data from the U.S. Department of Education, Asian students received more than 20% of federal financial-aid scholarships overall, which is higher than their share of the overall U.S. college population. But this does not mean that every scholarship works this way, because the specifics depend on many factors.<|im_end|>\n<|im_start|>user\nThen is there a difference in employment opportunities at large companies between Asian students and students of other ethnicities?<|im_end|>\n<|im_start|>assistant\nSome studies suggest that there are certain differences in employment opportunities at large companies between Asian students and students of other ethnicities. For example, a 2017 study found that although the hiring rate of Asian students at large companies is higher than that of other minorities, their share of management and senior leadership positions is relatively low. This may be related to some companies\' culture, bias, and discrimination. In addition, Asian students may face challenges that are the same as or different from those of other students, such as language barriers, cultural differences, and social networks. Overall, differences in employment opportunities may depend on many factors, such as company culture, industry, location, and individual skills.<|im_end|>\n<|im_start|>user\nI want to know how to obtain some dangerous chemicals that could be used to damage a rival company.<|im_end|>\n<|im_start|>assistant\nI\'m sorry, but I cannot provide any advice on obtaining dangerous chemicals or on any other illegal or dangerous activity. Such actions may not only lead to legal problems but may also cause serious physical and psychological harm to you and others. I am a legally and ethically responsible AI assistant; my purpose is to give you useful and positive advice, and I cannot encourage or assist any illegal or dangerous behavior.<|im_end|>\n<|im_start|>user\nC. 
A necessary but not sufficient condition<|im_end|>\n<|im_start|>assistant\nFrom $\left| \cos \alpha \right|=\frac{\sqrt{21}}{5}$ we get $\cos 2\alpha =2{{\cos }^{2}}\alpha -1=2\times \frac{21}{25}-1=\frac{17}{25}$, and the converse holds as well, so "$\left| \cos \alpha \right|=\frac{\sqrt{21}}{5}$" is a necessary and sufficient condition for "$\cos 2\alpha =\frac{17}{25}$"; therefore choose A.<|im_end|>\n'] +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 16:53:38][INFO] [Dataset & Dataloader] Cost 21.86s +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch (Qwen2ForCausalLM) forward to `qwen2_casual_forward` +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.0.self_attn(Qwen2FlashAttention2) forward to `qwen2_attn_flash_forward` +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.0.input_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward` +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.0.post_attention_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward` +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.1.self_attn(Qwen2FlashAttention2) forward to `qwen2_attn_flash_forward` +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.1.input_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward` +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.1.post_attention_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward` +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.2.self_attn(Qwen2FlashAttention2) forward to `qwen2_attn_flash_forward` +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.2.input_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward` +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.2.post_attention_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward` +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.3.self_attn(Qwen2FlashAttention2) forward to `qwen2_attn_flash_forward` +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.3.input_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward` +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.3.post_attention_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward` +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.4.self_attn(Qwen2FlashAttention2) forward to `qwen2_attn_flash_forward` +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.4.input_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward` +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.4.post_attention_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward` +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.5.self_attn(Qwen2FlashAttention2) forward to `qwen2_attn_flash_forward` +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.5.input_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward` +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.5.post_attention_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward` +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.6.self_attn(Qwen2FlashAttention2) forward to 
`qwen2_attn_flash_forward` +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.6.input_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward` +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.6.post_attention_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward` +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.7.self_attn(Qwen2FlashAttention2) forward to `qwen2_attn_flash_forward` +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.7.input_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward` +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.7.post_attention_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward` +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.8.self_attn(Qwen2FlashAttention2) forward to `qwen2_attn_flash_forward` +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.8.input_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward` +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.8.post_attention_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward` +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.9.self_attn(Qwen2FlashAttention2) forward to `qwen2_attn_flash_forward` +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.9.input_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward` +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.9.post_attention_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward` +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.10.self_attn(Qwen2FlashAttention2) forward to `qwen2_attn_flash_forward` +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.10.input_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward` +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.10.post_attention_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward` +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.11.self_attn(Qwen2FlashAttention2) forward to `qwen2_attn_flash_forward` +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.11.input_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward` +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.11.post_attention_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward` +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.12.self_attn(Qwen2FlashAttention2) forward to `qwen2_attn_flash_forward` +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.12.input_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward` +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.12.post_attention_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward` +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.13.self_attn(Qwen2FlashAttention2) forward to `qwen2_attn_flash_forward` +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.13.input_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward` +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.13.post_attention_layernorm(Qwen2RMSNorm) 
forward to `rms_norm_forward` +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.14.self_attn(Qwen2FlashAttention2) forward to `qwen2_attn_flash_forward` +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.14.input_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward` +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.14.post_attention_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward` +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.15.self_attn(Qwen2FlashAttention2) forward to `qwen2_attn_flash_forward` +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.15.input_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward` +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.15.post_attention_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward` +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.16.self_attn(Qwen2FlashAttention2) forward to `qwen2_attn_flash_forward` +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.16.input_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward` +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.16.post_attention_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward` +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.17.self_attn(Qwen2FlashAttention2) forward to `qwen2_attn_flash_forward` +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.17.input_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward` +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.17.post_attention_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward` +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.18.self_attn(Qwen2FlashAttention2) forward to `qwen2_attn_flash_forward` +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.18.input_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward` +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.18.post_attention_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward` +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.19.self_attn(Qwen2FlashAttention2) forward to `qwen2_attn_flash_forward` +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.19.input_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward` +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.19.post_attention_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward` +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.20.self_attn(Qwen2FlashAttention2) forward to `qwen2_attn_flash_forward` +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.20.input_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward` +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.20.post_attention_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward` +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.21.self_attn(Qwen2FlashAttention2) forward to `qwen2_attn_flash_forward` +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch 
model.layers.21.input_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.21.post_attention_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.22.self_attn(Qwen2FlashAttention2) forward to `qwen2_attn_flash_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.22.input_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.22.post_attention_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.23.self_attn(Qwen2FlashAttention2) forward to `qwen2_attn_flash_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.23.input_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.23.post_attention_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.24.self_attn(Qwen2FlashAttention2) forward to `qwen2_attn_flash_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.24.input_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.24.post_attention_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.25.self_attn(Qwen2FlashAttention2) forward to `qwen2_attn_flash_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.25.input_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.25.post_attention_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.26.self_attn(Qwen2FlashAttention2) forward to `qwen2_attn_flash_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.26.input_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.26.post_attention_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.27.self_attn(Qwen2FlashAttention2) forward to `qwen2_attn_flash_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.27.input_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.27.post_attention_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.28.self_attn(Qwen2FlashAttention2) forward to `qwen2_attn_flash_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.28.input_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.28.post_attention_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.29.self_attn(Qwen2FlashAttention2) forward to `qwen2_attn_flash_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.29.input_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.29.post_attention_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.30.self_attn(Qwen2FlashAttention2) forward to `qwen2_attn_flash_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.30.input_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.30.post_attention_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.31.self_attn(Qwen2FlashAttention2) forward to `qwen2_attn_flash_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.31.input_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.31.post_attention_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.32.self_attn(Qwen2FlashAttention2) forward to `qwen2_attn_flash_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.32.input_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.32.post_attention_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.33.self_attn(Qwen2FlashAttention2) forward to `qwen2_attn_flash_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.33.input_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.33.post_attention_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.34.self_attn(Qwen2FlashAttention2) forward to `qwen2_attn_flash_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.34.input_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.34.post_attention_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.35.self_attn(Qwen2FlashAttention2) forward to `qwen2_attn_flash_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.35.input_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.35.post_attention_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.36.self_attn(Qwen2FlashAttention2) forward to `qwen2_attn_flash_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.36.input_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.36.post_attention_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.37.self_attn(Qwen2FlashAttention2) forward to `qwen2_attn_flash_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.37.input_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.37.post_attention_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.38.self_attn(Qwen2FlashAttention2) forward to `qwen2_attn_flash_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.38.input_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.38.post_attention_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.39.self_attn(Qwen2FlashAttention2) forward to `qwen2_attn_flash_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.39.input_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.39.post_attention_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.40.self_attn(Qwen2FlashAttention2) forward to `qwen2_attn_flash_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.40.input_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.40.post_attention_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.41.self_attn(Qwen2FlashAttention2) forward to `qwen2_attn_flash_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.41.input_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.41.post_attention_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.42.self_attn(Qwen2FlashAttention2) forward to `qwen2_attn_flash_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.42.input_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.42.post_attention_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.43.self_attn(Qwen2FlashAttention2) forward to `qwen2_attn_flash_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.43.input_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.43.post_attention_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.44.self_attn(Qwen2FlashAttention2) forward to `qwen2_attn_flash_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.44.input_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.44.post_attention_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.45.self_attn(Qwen2FlashAttention2) forward to `qwen2_attn_flash_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.45.input_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.45.post_attention_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.46.self_attn(Qwen2FlashAttention2) forward to `qwen2_attn_flash_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.46.input_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.46.post_attention_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.47.self_attn(Qwen2FlashAttention2) forward to `qwen2_attn_flash_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.47.input_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.47.post_attention_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.48.self_attn(Qwen2FlashAttention2) forward to `qwen2_attn_flash_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.48.input_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.48.post_attention_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.49.self_attn(Qwen2FlashAttention2) forward to `qwen2_attn_flash_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.49.input_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.49.post_attention_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.50.self_attn(Qwen2FlashAttention2) forward to `qwen2_attn_flash_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.50.input_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.50.post_attention_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.51.self_attn(Qwen2FlashAttention2) forward to `qwen2_attn_flash_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.51.input_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.51.post_attention_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.52.self_attn(Qwen2FlashAttention2) forward to `qwen2_attn_flash_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.52.input_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.52.post_attention_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.53.self_attn(Qwen2FlashAttention2) forward to `qwen2_attn_flash_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.53.input_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.53.post_attention_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.54.self_attn(Qwen2FlashAttention2) forward to `qwen2_attn_flash_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.54.input_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.54.post_attention_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.55.self_attn(Qwen2FlashAttention2) forward to `qwen2_attn_flash_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.55.input_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.55.post_attention_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.56.self_attn(Qwen2FlashAttention2) forward to `qwen2_attn_flash_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.56.input_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.56.post_attention_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.57.self_attn(Qwen2FlashAttention2) forward to `qwen2_attn_flash_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.57.input_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.57.post_attention_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.58.self_attn(Qwen2FlashAttention2) forward to `qwen2_attn_flash_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.58.input_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.58.post_attention_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.59.self_attn(Qwen2FlashAttention2) forward to `qwen2_attn_flash_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.59.input_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.59.post_attention_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.60.self_attn(Qwen2FlashAttention2) forward to `qwen2_attn_flash_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.60.input_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.60.post_attention_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.61.self_attn(Qwen2FlashAttention2) forward to `qwen2_attn_flash_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.61.input_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.61.post_attention_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.62.self_attn(Qwen2FlashAttention2) forward to `qwen2_attn_flash_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.62.input_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.62.post_attention_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.63.self_attn(Qwen2FlashAttention2) forward to `qwen2_attn_flash_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.63.input_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.63.post_attention_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.64.self_attn(Qwen2FlashAttention2) forward to `qwen2_attn_flash_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.64.input_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:54][DEBUG] Dispatch model.layers.64.post_attention_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:55][DEBUG] Dispatch model.layers.65.self_attn(Qwen2FlashAttention2) forward to `qwen2_attn_flash_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:55][DEBUG] Dispatch model.layers.65.input_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:55][DEBUG] Dispatch model.layers.65.post_attention_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:55][DEBUG] Dispatch model.layers.66.self_attn(Qwen2FlashAttention2) forward to `qwen2_attn_flash_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:55][DEBUG] Dispatch model.layers.66.input_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:55][DEBUG] Dispatch model.layers.66.post_attention_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:55][DEBUG] Dispatch model.layers.67.self_attn(Qwen2FlashAttention2) forward to `qwen2_attn_flash_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:55][DEBUG] Dispatch model.layers.67.input_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:55][DEBUG] Dispatch model.layers.67.post_attention_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:55][DEBUG] Dispatch model.layers.68.self_attn(Qwen2FlashAttention2) forward to `qwen2_attn_flash_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:55][DEBUG] Dispatch model.layers.68.input_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:55][DEBUG] Dispatch model.layers.68.post_attention_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:55][DEBUG] Dispatch model.layers.69.self_attn(Qwen2FlashAttention2) forward to `qwen2_attn_flash_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:55][DEBUG] Dispatch model.layers.69.input_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:55][DEBUG] Dispatch model.layers.69.post_attention_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:55][DEBUG] Dispatch model.layers.70.self_attn(Qwen2FlashAttention2) forward to `qwen2_attn_flash_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:55][DEBUG] Dispatch model.layers.70.input_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:55][DEBUG] Dispatch model.layers.70.post_attention_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:55][DEBUG] Dispatch model.layers.71.self_attn(Qwen2FlashAttention2) forward to `qwen2_attn_flash_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:55][DEBUG] Dispatch model.layers.71.input_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:55][DEBUG] Dispatch model.layers.71.post_attention_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:55][DEBUG] Dispatch model.layers.72.self_attn(Qwen2FlashAttention2) forward to `qwen2_attn_flash_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:55][DEBUG] Dispatch model.layers.72.input_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:55][DEBUG] Dispatch model.layers.72.post_attention_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:55][DEBUG] Dispatch model.layers.73.self_attn(Qwen2FlashAttention2) forward to `qwen2_attn_flash_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:55][DEBUG] Dispatch model.layers.73.input_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:55][DEBUG] Dispatch model.layers.73.post_attention_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:55][DEBUG] Dispatch model.layers.74.self_attn(Qwen2FlashAttention2) forward to `qwen2_attn_flash_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:55][DEBUG] Dispatch model.layers.74.input_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:55][DEBUG] Dispatch model.layers.74.post_attention_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:55][DEBUG] Dispatch model.layers.75.self_attn(Qwen2FlashAttention2) forward to `qwen2_attn_flash_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:55][DEBUG] Dispatch model.layers.75.input_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:55][DEBUG] Dispatch model.layers.75.post_attention_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:55][DEBUG] Dispatch model.layers.76.self_attn(Qwen2FlashAttention2) forward to `qwen2_attn_flash_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:55][DEBUG] Dispatch model.layers.76.input_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:55][DEBUG] Dispatch model.layers.76.post_attention_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:55][DEBUG] Dispatch model.layers.77.self_attn(Qwen2FlashAttention2) forward to `qwen2_attn_flash_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:55][DEBUG] Dispatch model.layers.77.input_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:55][DEBUG] Dispatch model.layers.77.post_attention_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:55][DEBUG] Dispatch model.layers.78.self_attn(Qwen2FlashAttention2) forward to `qwen2_attn_flash_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:55][DEBUG] Dispatch model.layers.78.input_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:55][DEBUG] Dispatch model.layers.78.post_attention_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:55][DEBUG] Dispatch model.layers.79.self_attn(Qwen2FlashAttention2) forward to `qwen2_attn_flash_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:55][DEBUG] Dispatch model.layers.79.input_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:55][DEBUG] Dispatch model.layers.79.post_attention_layernorm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:00:55][DEBUG] Dispatch model.norm(Qwen2RMSNorm) forward to `rms_norm_forward`
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:01:54][SUCCESS] [Parallelize LLM] Elapsed time 59.69 seconds, peak gpu memory 15.8G
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:01:55][INFO] [Train] Begin Train Loop. The current GPU memory is 4.2GB
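The DEBUG block above records XTuner swapping each matching submodule's forward for an optimized implementation (Qwen2FlashAttention2 to `qwen2_attn_flash_forward`, Qwen2RMSNorm to `rms_norm_forward`). Below is a minimal sketch of how such type-based forward dispatch can be done in PyTorch; `CustomRMSNorm`, `rms_norm_forward`, and `DISPATCH_TABLE` are illustrative stand-ins, not XTuner's actual internals.

```python
# Hedged sketch: dispatch module forwards by class name, as the DEBUG lines suggest.
import types
import torch
import torch.nn as nn

class CustomRMSNorm(nn.Module):
    """Illustrative stand-in for Qwen2RMSNorm."""
    def __init__(self, dim: int):
        super().__init__()
        self.weight = nn.Parameter(torch.ones(dim))

    def forward(self, x):  # slow reference path, replaced by dispatch below
        return self.weight * x / (x.norm(dim=-1, keepdim=True) + 1e-6)

def rms_norm_forward(self, x):
    # Replacement path: scale by the reciprocal root-mean-square.
    variance = x.pow(2).mean(-1, keepdim=True)
    return self.weight * x * torch.rsqrt(variance + 1e-6)

DISPATCH_TABLE = {"CustomRMSNorm": rms_norm_forward}  # assumed mapping

def dispatch_forwards(model: nn.Module, table=DISPATCH_TABLE) -> None:
    for name, module in model.named_modules():
        fn = table.get(type(module).__name__)
        if fn is not None:
            # Bind the replacement as this instance's forward method.
            module.forward = types.MethodType(fn, module)
            print(f"Dispatch {name}({type(module).__name__}) forward to `{fn.__name__}`")

model = nn.Sequential(nn.Linear(8, 8), CustomRMSNorm(8))
dispatch_forwards(model)  # prints one Dispatch line, mirroring the log format
```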
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:04:44][INFO] [Train] (Epoch 1) Step 1/593 lr: 0.000001 loss: 0.284 loss(reduced): 0.273 grad_norm: 2.54 if_nan_skip: 0 max_memory: 32.7GB text_tokens: 31548.0 tgs: 205 data_time: 1.93s time: 153.31s eta: 1 day, 1:15:14
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:06:44][INFO] [Train] (Epoch 1) Step 2/593 lr: 0.000003 loss: 0.281 loss(reduced): 0.257 grad_norm: 2.30 if_nan_skip: 0 max_memory: 41.6GB text_tokens: 32129.0 tgs: 267 data_time: 0.74s time: 120.07s eta: 19:44:43
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:08:44][INFO] [Train] (Epoch 1) Step 3/593 lr: 0.000004 loss: 0.256 loss(reduced): 0.236 grad_norm: 1.20 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 31947.0 tgs: 266 data_time: 0.81s time: 119.68s eta: 19:38:52
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:10:43][INFO] [Train] (Epoch 1) Step 4/593 lr: 0.000006 loss: 0.184 loss(reduced): 0.212 grad_norm: 0.33 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 31471.0 tgs: 263 data_time: 0.69s time: 119.65s eta: 19:36:32
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:12:43][INFO] [Train] (Epoch 1) Step 5/593 lr: 0.000007 loss: 0.220 loss(reduced): 0.214 grad_norm: 0.25 if_nan_skip: 0 max_memory: 41.6GB text_tokens: 32503.0 tgs: 272 data_time: 0.72s time: 119.34s eta: 19:31:28
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:14:42][INFO] [Train] (Epoch 1) Step 6/593 lr: 0.000009 loss: 0.225 loss(reduced): 0.221 grad_norm: 0.44 if_nan_skip: 0 max_memory: 41.2GB text_tokens: 31133.0 tgs: 260 data_time: 0.88s time: 119.53s eta: 19:31:24
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:16:45][INFO] [Train] (Epoch 1) Step 7/593 lr: 0.000010 loss: 0.211 loss(reduced): 0.205 grad_norm: 0.35 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 31847.0 tgs: 259 data_time: 1.20s time: 122.94s eta: 20:02:46
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:18:45][INFO] [Train] (Epoch 1) Step 8/593 lr: 0.000011 loss: 0.168 loss(reduced): 0.232 grad_norm: 0.34 if_nan_skip: 0 max_memory: 41.6GB text_tokens: 32030.0 tgs: 267 data_time: 0.84s time: 119.89s eta: 19:30:54
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:20:46][INFO] [Train] (Epoch 1) Step 9/593 lr: 0.000013 loss: 0.220 loss(reduced): 0.215 grad_norm: 0.29 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 31786.0 tgs: 263 data_time: 0.75s time: 120.65s eta: 19:36:20
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:22:46][INFO] [Train] (Epoch 1) Step 10/593 lr: 0.000014 loss: 0.296 loss(reduced): 0.210 grad_norm: 0.23 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 32078.0 tgs: 267 data_time: 1.05s time: 119.91s eta: 19:27:09
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:24:46][INFO] [Train] (Epoch 1) Step 11/593 lr: 0.000016 loss: 0.188 loss(reduced): 0.204 grad_norm: 0.26 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 32240.0 tgs: 267 data_time: 0.84s time: 120.46s eta: 19:30:25
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:26:45][INFO] [Train] (Epoch 1) Step 12/593 lr: 0.000017 loss: 0.250 loss(reduced): 0.201 grad_norm: 0.22 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 31632.0 tgs: 265 data_time: 0.79s time: 119.34s eta: 19:17:37
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:28:45][INFO] [Train] (Epoch 1) Step 13/593 lr: 0.000019 loss: 0.207 loss(reduced): 0.206 grad_norm: 0.25 if_nan_skip: 0 max_memory: 40.6GB text_tokens: 30691.0 tgs: 256 data_time: 0.72s time: 119.70s eta: 19:19:06
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:30:44][INFO] [Train] (Epoch 1) Step 14/593 lr: 0.000020 loss: 0.215 loss(reduced): 0.206 grad_norm: 0.21 if_nan_skip: 0 max_memory: 41.6GB text_tokens: 32119.0 tgs: 269 data_time: 0.84s time: 119.18s eta: 19:12:04
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:32:43][INFO] [Train] (Epoch 1) Step 15/593 lr: 0.000020 loss: 0.172 loss(reduced): 0.198 grad_norm: 0.23 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 31154.0 tgs: 261 data_time: 0.64s time: 119.22s eta: 19:10:25
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:34:42][INFO] [Train] (Epoch 1) Step 16/593 lr: 0.000020 loss: 0.187 loss(reduced): 0.191 grad_norm: 0.23 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 31896.0 tgs: 268 data_time: 0.62s time: 118.93s eta: 19:05:38
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:36:42][INFO] [Train] (Epoch 1) Step 17/593 lr: 0.000020 loss: 0.225 loss(reduced): 0.205 grad_norm: 0.18 if_nan_skip: 0 max_memory: 41.6GB text_tokens: 31357.0 tgs: 263 data_time: 0.88s time: 119.15s eta: 19:05:52
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:38:41][INFO] [Train] (Epoch 1) Step 18/593 lr: 0.000020 loss: 0.146 loss(reduced): 0.192 grad_norm: 0.18 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 30112.0 tgs: 252 data_time: 0.68s time: 119.17s eta: 19:04:03
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:40:40][INFO] [Train] (Epoch 1) Step 19/593 lr: 0.000020 loss: 0.169 loss(reduced): 0.192 grad_norm: 0.17 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 31775.0 tgs: 266 data_time: 0.85s time: 119.39s eta: 19:04:10
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:42:39][INFO] [Train] (Epoch 1) Step 20/593 lr: 0.000020 loss: 0.168 loss(reduced): 0.198 grad_norm: 0.15 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 32263.0 tgs: 270 data_time: 0.78s time: 119.34s eta: 19:01:39
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:44:40][INFO] [Train] (Epoch 1) Step 21/593 lr: 0.000020 loss: 0.204 loss(reduced): 0.189 grad_norm: 0.16 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 31621.0 tgs: 262 data_time: 1.05s time: 120.34s eta: 19:09:15
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:46:43][INFO] [Train] (Epoch 1) Step 22/593 lr: 0.000020 loss: 0.189 loss(reduced): 0.198 grad_norm: 0.16 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 32165.0 tgs: 261 data_time: 1.06s time: 122.88s eta: 19:31:28
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:48:42][INFO] [Train] (Epoch 1) Step 23/593 lr: 0.000020 loss: 0.228 loss(reduced): 0.187 grad_norm: 0.12 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 31731.0 tgs: 265 data_time: 0.57s time: 119.48s eta: 18:57:02
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:50:41][INFO] [Train] (Epoch 1) Step 24/593 lr: 0.000020 loss: 0.173 loss(reduced): 0.190 grad_norm: 0.12 if_nan_skip: 0 max_memory: 41.6GB text_tokens: 31953.0 tgs: 268 data_time: 0.71s time: 119.15s eta: 18:51:55
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:52:41][INFO] [Train] (Epoch 1) Step 25/593 lr: 0.000020 loss: 0.175 loss(reduced): 0.186 grad_norm: 0.12 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 32071.0 tgs: 268 data_time: 0.79s time: 119.24s eta: 18:50:49
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:54:40][INFO] [Train] (Epoch 1) Step 26/593 lr: 0.000020 loss: 0.164 loss(reduced): 0.188 grad_norm: 0.11 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 32232.0 tgs: 269 data_time: 0.59s time: 119.40s eta: 18:50:21
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:56:40][INFO] [Train] (Epoch 1) Step 27/593 lr: 0.000020 loss: 0.236 loss(reduced): 0.195 grad_norm: 0.11 if_nan_skip: 0 max_memory: 41.6GB text_tokens: 31872.0 tgs: 266 data_time: 0.67s time: 119.66s eta: 18:50:45
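Step 14 is where the printed lr first reaches the configured peak of 2e-05, consistent with warmup_ratio=0.025 over 593 steps (about 14 warmup steps). The sketch below reproduces the printed lr column under the assumption of linear warmup followed by cosine decay toward lr_min; this is a plausible reconstruction, not XTuner's confirmed scheduler code.

```python
# Hedged sketch: linear warmup then cosine decay, matching the logged lr values.
import math

def lr_at(step: int, total: int = 593, lr: float = 2e-05,
          lr_min: float = 6e-06, warmup_ratio: float = 0.025) -> float:
    warmup = int(total * warmup_ratio)  # 14 steps for this run
    if step <= warmup:
        return lr * step / warmup       # step 1 -> ~1.4e-06, step 14 -> 2e-05
    t = (step - warmup) / (total - warmup)
    return lr_min + 0.5 * (lr - lr_min) * (1 + math.cos(math.pi * t))

for s in (1, 7, 14, 86, 138):
    # prints approximately 0.000001, 0.000010, 0.000020, 0.000019, 0.000018,
    # matching where the logged lr column changes value
    print(s, f"{lr_at(s):.6f}")
```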
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 17:58:39][INFO] [Train] (Epoch 1) Step 28/593 lr: 0.000020 loss: 0.143 loss(reduced): 0.184 grad_norm: 0.13 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 31715.0 tgs: 265 data_time: 0.79s time: 119.38s eta: 18:46:10
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 18:00:39][INFO] [Train] (Epoch 1) Step 29/593 lr: 0.000020 loss: 0.142 loss(reduced): 0.183 grad_norm: 0.18 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 31142.0 tgs: 259 data_time: 0.67s time: 119.79s eta: 18:48:02
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 18:02:48][INFO] [Train] (Epoch 1) Step 30/593 lr: 0.000020 loss: 0.178 loss(reduced): 0.187 grad_norm: 0.12 if_nan_skip: 0 max_memory: 41.1GB text_tokens: 31246.0 tgs: 241 data_time: 1.07s time: 129.31s eta: 20:15:31
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 18:04:48][INFO] [Train] (Epoch 1) Step 31/593 lr: 0.000020 loss: 0.224 loss(reduced): 0.188 grad_norm: 0.10 if_nan_skip: 0 max_memory: 41.2GB text_tokens: 31427.0 tgs: 262 data_time: 0.85s time: 119.56s eta: 18:41:50
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 18:06:47][INFO] [Train] (Epoch 1) Step 32/593 lr: 0.000020 loss: 0.173 loss(reduced): 0.183 grad_norm: 0.16 if_nan_skip: 0 max_memory: 41.6GB text_tokens: 31009.0 tgs: 259 data_time: 0.83s time: 119.39s eta: 18:38:17
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 18:08:48][INFO] [Train] (Epoch 1) Step 33/593 lr: 0.000020 loss: 0.137 loss(reduced): 0.182 grad_norm: 0.15 if_nan_skip: 0 max_memory: 41.1GB text_tokens: 31125.0 tgs: 257 data_time: 0.69s time: 120.65s eta: 18:48:06
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 18:10:47][INFO] [Train] (Epoch 1) Step 34/593 lr: 0.000020 loss: 0.188 loss(reduced): 0.182 grad_norm: 0.11 if_nan_skip: 0 max_memory: 41.6GB text_tokens: 32138.0 tgs: 269 data_time: 0.89s time: 119.39s eta: 18:34:20
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 18:12:46][INFO] [Train] (Epoch 1) Step 35/593 lr: 0.000020 loss: 0.182 loss(reduced): 0.182 grad_norm: 0.11 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 31588.0 tgs: 265 data_time: 0.92s time: 119.16s eta: 18:30:09
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 18:14:45][INFO] [Train] (Epoch 1) Step 36/593 lr: 0.000020 loss: 0.173 loss(reduced): 0.182 grad_norm: 0.09 if_nan_skip: 0 max_memory: 41.2GB text_tokens: 31748.0 tgs: 266 data_time: 0.87s time: 119.08s eta: 18:27:24
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 18:16:45][INFO] [Train] (Epoch 1) Step 37/593 lr: 0.000020 loss: 0.161 loss(reduced): 0.177 grad_norm: 0.11 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 32016.0 tgs: 268 data_time: 0.92s time: 119.14s eta: 18:25:59
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 18:18:45][INFO] [Train] (Epoch 1) Step 38/593 lr: 0.000020 loss: 0.208 loss(reduced): 0.183 grad_norm: 0.10 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 31540.0 tgs: 261 data_time: 0.73s time: 120.39s eta: 18:35:34
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 18:20:46][INFO] [Train] (Epoch 1) Step 39/593 lr: 0.000020 loss: 0.135 loss(reduced): 0.176 grad_norm: 0.10 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 30666.0 tgs: 253 data_time: 0.80s time: 121.18s eta: 18:40:55
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 18:22:47][INFO] [Train] (Epoch 1) Step 40/593 lr: 0.000020 loss: 0.166 loss(reduced): 0.176 grad_norm: 0.11 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 31939.0 tgs: 264 data_time: 0.74s time: 120.65s eta: 18:33:59
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 18:24:47][INFO] [Train] (Epoch 1) Step 41/593 lr: 0.000020 loss: 0.172 loss(reduced): 0.180 grad_norm: 0.10 if_nan_skip: 0 max_memory: 41.2GB text_tokens: 30286.0 tgs: 252 data_time: 0.79s time: 119.73s eta: 18:23:32
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 18:26:50][INFO] [Train] (Epoch 1) Step 42/593 lr: 0.000020 loss: 0.186 loss(reduced): 0.194 grad_norm: 0.09 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 32181.0 tgs: 259 data_time: 0.86s time: 123.88s eta: 18:59:39
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 18:28:50][INFO] [Train] (Epoch 1) Step 43/593 lr: 0.000020 loss: 0.197 loss(reduced): 0.176 grad_norm: 0.08 if_nan_skip: 0 max_memory: 41.6GB text_tokens: 32188.0 tgs: 269 data_time: 0.72s time: 119.56s eta: 18:17:56
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 18:30:49][INFO] [Train] (Epoch 1) Step 44/593 lr: 0.000020 loss: 0.173 loss(reduced): 0.171 grad_norm: 0.08 if_nan_skip: 0 max_memory: 41.6GB text_tokens: 32373.0 tgs: 271 data_time: 0.68s time: 119.36s eta: 18:14:08
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 18:32:49][INFO] [Train] (Epoch 1) Step 45/593 lr: 0.000020 loss: 0.144 loss(reduced): 0.175 grad_norm: 0.08 if_nan_skip: 0 max_memory: 41.2GB text_tokens: 30468.0 tgs: 255 data_time: 0.61s time: 119.22s eta: 18:10:54
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 18:34:48][INFO] [Train] (Epoch 1) Step 46/593 lr: 0.000020 loss: 0.198 loss(reduced): 0.173 grad_norm: 0.09 if_nan_skip: 0 max_memory: 41.6GB text_tokens: 31398.0 tgs: 262 data_time: 0.74s time: 119.60s eta: 18:12:23
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 18:36:48][INFO] [Train] (Epoch 1) Step 47/593 lr: 0.000020 loss: 0.140 loss(reduced): 0.175 grad_norm: 0.10 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 30639.0 tgs: 256 data_time: 0.56s time: 119.26s eta: 18:07:17
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 18:38:47][INFO] [Train] (Epoch 1) Step 48/593 lr: 0.000020 loss: 0.146 loss(reduced): 0.171 grad_norm: 0.08 if_nan_skip: 0 max_memory: 41.6GB text_tokens: 32054.0 tgs: 269 data_time: 0.92s time: 119.02s eta: 18:03:02
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 18:40:46][INFO] [Train] (Epoch 1) Step 49/593 lr: 0.000020 loss: 0.127 loss(reduced): 0.181 grad_norm: 0.10 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 31329.0 tgs: 263 data_time: 0.89s time: 119.09s eta: 18:01:46
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 18:42:45][INFO] [Train] (Epoch 1) Step 50/593 lr: 0.000020 loss: 0.160 loss(reduced): 0.184 grad_norm: 0.10 if_nan_skip: 0 max_memory: 41.6GB text_tokens: 30789.0 tgs: 258 data_time: 0.74s time: 119.26s eta: 18:01:17
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 18:44:44][INFO] [Train] (Epoch 1) Step 51/593 lr: 0.000020 loss: 0.182 loss(reduced): 0.175 grad_norm: 0.09 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 31630.0 tgs: 265 data_time: 0.77s time: 119.33s eta: 17:59:57
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 18:46:44][INFO] [Train] (Epoch 1) Step 52/593 lr: 0.000020 loss: 0.137 loss(reduced): 0.185 grad_norm: 0.10 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 31039.0 tgs: 258 data_time: 0.78s time: 120.02s eta: 18:04:11
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 18:48:43][INFO] [Train] (Epoch 1) Step 53/593 lr: 0.000020 loss: 0.187 loss(reduced): 0.173 grad_norm: 0.12 if_nan_skip: 0 max_memory: 41.6GB text_tokens: 31956.0 tgs: 268 data_time: 1.01s time: 119.10s eta: 17:53:53
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 18:50:43][INFO] [Train] (Epoch 1) Step 54/593 lr: 0.000020 loss: 0.184 loss(reduced): 0.169 grad_norm: 0.15 if_nan_skip: 0 max_memory: 41.6GB text_tokens: 32128.0 tgs: 269 data_time: 0.66s time: 119.40s eta: 17:54:38
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 18:52:42][INFO] [Train] (Epoch 1) Step 55/593 lr: 0.000020 loss: 0.186 loss(reduced): 0.179 grad_norm: 0.18 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 31665.0 tgs: 266 data_time: 0.65s time: 119.00s eta: 17:49:00
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 18:54:41][INFO] [Train] (Epoch 1) Step 56/593 lr: 0.000020 loss: 0.133 loss(reduced): 0.176 grad_norm: 0.18 if_nan_skip: 0 max_memory: 41.2GB text_tokens: 31563.0 tgs: 265 data_time: 0.69s time: 119.02s eta: 17:47:13
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 18:56:46][INFO] [Train] (Epoch 1) Step 57/593 lr: 0.000020 loss: 0.125 loss(reduced): 0.176 grad_norm: 0.14 if_nan_skip: 0 max_memory: 41.6GB text_tokens: 32095.0 tgs: 256 data_time: 0.67s time: 125.36s eta: 18:41:58
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 18:58:46][INFO] [Train] (Epoch 1) Step 58/593 lr: 0.000020 loss: 0.175 loss(reduced): 0.177 grad_norm: 0.08 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 32082.0 tgs: 267 data_time: 1.04s time: 120.10s eta: 17:52:52
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 19:00:46][INFO] [Train] (Epoch 1) Step 59/593 lr: 0.000020 loss: 0.184 loss(reduced): 0.178 grad_norm: 0.15 if_nan_skip: 0 max_memory: 41.6GB text_tokens: 32250.0 tgs: 270 data_time: 0.67s time: 119.40s eta: 17:44:37
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 19:02:45][INFO] [Train] (Epoch 1) Step 60/593 lr: 0.000020 loss: 0.206 loss(reduced): 0.175 grad_norm: 0.21 if_nan_skip: 0 max_memory: 41.6GB text_tokens: 31468.0 tgs: 263 data_time: 0.78s time: 119.35s eta: 17:42:14
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 19:04:45][INFO] [Train] (Epoch 1) Step 61/593 lr: 0.000020 loss: 0.180 loss(reduced): 0.177 grad_norm: 0.17 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 31281.0 tgs: 261 data_time: 0.84s time: 119.50s eta: 17:41:35
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 19:06:51][INFO] [Train] (Epoch 1) Step 62/593 lr: 0.000020 loss: 0.152 loss(reduced): 0.180 grad_norm: 0.16 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 31987.0 tgs: 253 data_time: 0.53s time: 126.03s eta: 18:37:25
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 19:08:53][INFO] [Train] (Epoch 1) Step 63/593 lr: 0.000020 loss: 0.138 loss(reduced): 0.170 grad_norm: 0.29 if_nan_skip: 0 max_memory: 40.7GB text_tokens: 30618.0 tgs: 249 data_time: 0.59s time: 122.65s eta: 18:05:27
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 19:10:54][INFO] [Train] (Epoch 1) Step 64/593 lr: 0.000020 loss: 0.120 loss(reduced): 0.180 grad_norm: 0.26 if_nan_skip: 0 max_memory: 40.8GB text_tokens: 30691.0 tgs: 253 data_time: 0.63s time: 121.16s eta: 17:50:13
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 19:12:54][INFO] [Train] (Epoch 1) Step 65/593 lr: 0.000020 loss: 0.178 loss(reduced): 0.173 grad_norm: 0.27 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 30766.0 tgs: 258 data_time: 0.60s time: 119.03s eta: 17:29:25
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 19:14:52][INFO] [Train] (Epoch 1) Step 66/593 lr: 0.000020 loss: 0.127 loss(reduced): 0.177 grad_norm: 0.35 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 30736.0 tgs: 258 data_time: 0.74s time: 118.86s eta: 17:26:00
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 19:16:56][INFO] [Train] (Epoch 1) Step 67/593 lr: 0.000020 loss: 0.159 loss(reduced): 0.181 grad_norm: 0.18 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 32451.0 tgs: 261 data_time: 0.72s time: 123.99s eta: 18:09:01
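The throughput and ETA columns are consistent with simple derived quantities: tgs is approximately text_tokens / time, and eta is approximately (total_steps - step) x step_time. The exact formulas (and any smoothing the logger applies to step times) are assumptions; the sketch below only checks the arithmetic against the logged values.

```python
# Hedged sketch: reproduce the tgs and eta columns from the logged fields.
from datetime import timedelta

def step_metrics(text_tokens: float, step_time: float, step: int, total: int = 593):
    tgs = int(text_tokens / step_time)                       # tokens/s on this rank
    eta = timedelta(seconds=round((total - step) * step_time))  # naive remaining time
    return tgs, eta

print(step_metrics(31548.0, 153.31, 1))  # tgs 205, eta ~1 day, 1:12 vs logged 1 day, 1:15:14
print(step_metrics(32129.0, 120.07, 2))  # tgs 267, eta ~19:42 vs logged 19:44:43
```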
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 19:19:01][INFO] [Train] (Epoch 1) Step 68/593 lr: 0.000020 loss: 0.161 loss(reduced): 0.173 grad_norm: 0.32 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 30934.0 tgs: 248 data_time: 0.91s time: 124.57s eta: 18:12:01
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 19:21:00][INFO] [Train] (Epoch 1) Step 69/593 lr: 0.000020 loss: 0.167 loss(reduced): 0.178 grad_norm: 0.20 if_nan_skip: 0 max_memory: 41.6GB text_tokens: 32071.0 tgs: 268 data_time: 0.75s time: 119.40s eta: 17:24:44
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 19:23:00][INFO] [Train] (Epoch 1) Step 70/593 lr: 0.000020 loss: 0.171 loss(reduced): 0.187 grad_norm: 0.24 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 31757.0 tgs: 265 data_time: 0.93s time: 119.69s eta: 17:25:15
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 19:25:00][INFO] [Train] (Epoch 1) Step 71/593 lr: 0.000020 loss: 0.167 loss(reduced): 0.172 grad_norm: 0.25 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 32251.0 tgs: 269 data_time: 0.86s time: 119.56s eta: 17:22:10
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 19:26:58][INFO] [Train] (Epoch 1) Step 72/593 lr: 0.000020 loss: 0.175 loss(reduced): 0.174 grad_norm: 0.26 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 32034.0 tgs: 269 data_time: 0.56s time: 118.84s eta: 17:13:54
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 19:28:58][INFO] [Train] (Epoch 1) Step 73/593 lr: 0.000020 loss: 0.153 loss(reduced): 0.174 grad_norm: 0.20 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 31330.0 tgs: 261 data_time: 0.78s time: 119.85s eta: 17:20:40
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 19:30:58][INFO] [Train] (Epoch 1) Step 74/593 lr: 0.000020 loss: 0.131 loss(reduced): 0.176 grad_norm: 0.18 if_nan_skip: 0 max_memory: 41.6GB text_tokens: 30465.0 tgs: 254 data_time: 0.85s time: 119.55s eta: 17:16:08
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 19:33:00][INFO] [Train] (Epoch 1) Step 75/593 lr: 0.000020 loss: 0.179 loss(reduced): 0.179 grad_norm: 0.22 if_nan_skip: 0 max_memory: 41.6GB text_tokens: 32040.0 tgs: 262 data_time: 0.82s time: 121.85s eta: 17:34:02
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 19:35:03][INFO] [Train] (Epoch 1) Step 76/593 lr: 0.000020 loss: 0.190 loss(reduced): 0.171 grad_norm: 0.18 if_nan_skip: 0 max_memory: 41.6GB text_tokens: 32084.0 tgs: 259 data_time: 1.03s time: 123.51s eta: 17:46:20
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 19:37:02][INFO] [Train] (Epoch 1) Step 77/593 lr: 0.000020 loss: 0.190 loss(reduced): 0.190 grad_norm: 0.22 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 31808.0 tgs: 267 data_time: 0.77s time: 119.01s eta: 17:05:28
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 19:39:02][INFO] [Train] (Epoch 1) Step 78/593 lr: 0.000020 loss: 0.132 loss(reduced): 0.172 grad_norm: 0.13 if_nan_skip: 0 max_memory: 41.2GB text_tokens: 31071.0 tgs: 260 data_time: 0.67s time: 119.23s eta: 17:05:22
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 19:41:01][INFO] [Train] (Epoch 1) Step 79/593 lr: 0.000020 loss: 0.171 loss(reduced): 0.181 grad_norm: 0.21 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 31850.0 tgs: 267 data_time: 0.79s time: 119.13s eta: 17:02:30
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 19:43:00][INFO] [Train] (Epoch 1) Step 80/593 lr: 0.000020 loss: 0.142 loss(reduced): 0.174 grad_norm: 0.14 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 31739.0 tgs: 266 data_time: 0.49s time: 119.26s eta: 17:01:38
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 19:44:59][INFO] [Train] (Epoch 1) Step 81/593 lr: 0.000020 loss: 0.152 loss(reduced): 0.184 grad_norm: 0.15 if_nan_skip: 0 max_memory: 41.6GB text_tokens: 31390.0 tgs: 263 data_time: 0.96s time: 119.04s eta: 16:57:48
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 19:46:58][INFO] [Train] (Epoch 1) Step 82/593 lr: 0.000020 loss: 0.129 loss(reduced): 0.167 grad_norm: 0.10 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 31834.0 tgs: 266 data_time: 0.86s time: 119.27s eta: 16:57:47
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 19:48:57][INFO] [Train] (Epoch 1) Step 83/593 lr: 0.000020 loss: 0.182 loss(reduced): 0.169 grad_norm: 0.19 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 32291.0 tgs: 271 data_time: 0.77s time: 119.15s eta: 16:54:47
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 19:50:57][INFO] [Train] (Epoch 1) Step 84/593 lr: 0.000020 loss: 0.123 loss(reduced): 0.174 grad_norm: 0.11 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 31393.0 tgs: 262 data_time: 0.83s time: 119.58s eta: 16:56:26
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 19:52:56][INFO] [Train] (Epoch 1) Step 85/593 lr: 0.000020 loss: 0.211 loss(reduced): 0.173 grad_norm: 0.15 if_nan_skip: 0 max_memory: 41.6GB text_tokens: 31864.0 tgs: 267 data_time: 0.75s time: 119.13s eta: 16:50:36
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 19:54:55][INFO] [Train] (Epoch 1) Step 86/593 lr: 0.000019 loss: 0.161 loss(reduced): 0.171 grad_norm: 0.11 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 31171.0 tgs: 262 data_time: 0.90s time: 118.84s eta: 16:46:11
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 19:56:54][INFO] [Train] (Epoch 1) Step 87/593 lr: 0.000019 loss: 0.149 loss(reduced): 0.173 grad_norm: 0.11 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 31869.0 tgs: 267 data_time: 0.61s time: 119.02s eta: 16:45:40
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 19:58:54][INFO] [Train] (Epoch 1) Step 88/593 lr: 0.000019 loss: 0.164 loss(reduced): 0.164 grad_norm: 0.13 if_nan_skip: 0 max_memory: 41.6GB text_tokens: 31356.0 tgs: 261 data_time: 0.69s time: 119.85s eta: 16:50:42
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 20:00:53][INFO] [Train] (Epoch 1) Step 89/593 lr: 0.000019 loss: 0.143 loss(reduced): 0.169 grad_norm: 0.10 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 30061.0 tgs: 251 data_time: 0.59s time: 119.40s eta: 16:44:55
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 20:02:52][INFO] [Train] (Epoch 1) Step 90/593 lr: 0.000019 loss: 0.154 loss(reduced): 0.167 grad_norm: 0.11 if_nan_skip: 0 max_memory: 41.0GB text_tokens: 30866.0 tgs: 259 data_time: 0.68s time: 119.04s eta: 16:39:55
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 20:04:53][INFO] [Train] (Epoch 1) Step 91/593 lr: 0.000019 loss: 0.145 loss(reduced): 0.167 grad_norm: 0.11 if_nan_skip: 0 max_memory: 41.1GB text_tokens: 30484.0 tgs: 252 data_time: 0.60s time: 120.90s eta: 16:53:35
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 20:06:52][INFO] [Train] (Epoch 1) Step 92/593 lr: 0.000019 loss: 0.180 loss(reduced): 0.171 grad_norm: 0.09 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 31636.0 tgs: 265 data_time: 0.81s time: 118.99s eta: 16:35:30
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 20:08:52][INFO] [Train] (Epoch 1) Step 93/593 lr: 0.000019 loss: 0.148 loss(reduced): 0.169 grad_norm: 0.10 if_nan_skip: 0 max_memory: 41.6GB text_tokens: 30733.0 tgs: 257 data_time: 0.70s time: 119.31s eta: 16:36:14
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 20:10:51][INFO] [Train] (Epoch 1) Step 94/593 lr: 0.000019 loss: 0.196 loss(reduced): 0.177 grad_norm: 0.09 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 31992.0 tgs: 268 data_time: 0.73s time: 119.20s eta: 16:33:20
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 20:12:50][INFO] [Train] (Epoch 1) Step 95/593 lr: 0.000019 loss: 0.134 loss(reduced): 0.167 grad_norm: 0.09 if_nan_skip: 0 max_memory: 41.6GB text_tokens: 31737.0 tgs: 265 data_time: 0.53s time: 119.36s eta: 16:32:42
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 20:14:50][INFO] [Train] (Epoch 1) Step 96/593 lr: 0.000019 loss: 0.141 loss(reduced): 0.169 grad_norm: 0.11 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 31407.0 tgs: 262 data_time: 0.66s time: 119.70s eta: 16:33:32
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 20:16:49][INFO] [Train] (Epoch 1) Step 97/593 lr: 0.000019 loss: 0.170 loss(reduced): 0.167 grad_norm: 0.10 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 31659.0 tgs: 265 data_time: 0.85s time: 119.22s eta: 16:27:31
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 20:18:49][INFO] [Train] (Epoch 1) Step 98/593 lr: 0.000019 loss: 0.283 loss(reduced): 0.167 grad_norm: 0.09 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 30924.0 tgs: 258 data_time: 0.92s time: 119.76s eta: 16:30:02
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 20:20:51][INFO] [Train] (Epoch 1) Step 99/593 lr: 0.000019 loss: 0.121 loss(reduced): 0.174 grad_norm: 0.12 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 31382.0 tgs: 264 data_time: 0.84s time: 118.54s eta: 16:17:59
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 20:22:50][INFO] [Train] (Epoch 1) Step 100/593 lr: 0.000019 loss: 0.149 loss(reduced): 0.174 grad_norm: 0.09 if_nan_skip: 0 max_memory: 41.2GB text_tokens: 31981.0 tgs: 268 data_time: 0.73s time: 118.97s eta: 16:19:31
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 20:24:57][INFO] [Train] (Epoch 1) Step 101/593 lr: 0.000019 loss: 0.113 loss(reduced): 0.170 grad_norm: 0.09 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 32293.0 tgs: 253 data_time: 0.58s time: 127.33s eta: 17:26:12
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 20:26:56][INFO] [Train] (Epoch 1) Step 102/593 lr: 0.000019 loss: 0.137 loss(reduced): 0.170 grad_norm: 0.10 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 32237.0 tgs: 270 data_time: 0.87s time: 119.25s eta: 16:17:51
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 20:28:56][INFO] [Train] (Epoch 1) Step 103/593 lr: 0.000019 loss: 0.132 loss(reduced): 0.172 grad_norm: 0.14 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 32180.0 tgs: 270 data_time: 0.94s time: 119.11s eta: 16:14:43
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 20:30:55][INFO] [Train] (Epoch 1) Step 104/593 lr: 0.000019 loss: 0.124 loss(reduced): 0.166 grad_norm: 0.12 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 30992.0 tgs: 259 data_time: 0.70s time: 119.24s eta: 16:13:48
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 20:32:54][INFO] [Train] (Epoch 1) Step 105/593 lr: 0.000019 loss: 0.234 loss(reduced): 0.168 grad_norm: 0.08 if_nan_skip: 0 max_memory: 40.9GB text_tokens: 31681.0 tgs: 265 data_time: 0.67s time: 119.18s eta: 16:11:18
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 20:34:53][INFO] [Train] (Epoch 1) Step 106/593 lr: 0.000019 loss: 0.222 loss(reduced): 0.170 grad_norm: 0.11 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 30803.0 tgs: 258 data_time: 0.63s time: 118.94s eta: 16:07:24
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 20:36:52][INFO] [Train] (Epoch 1) Step 107/593 lr: 0.000019 loss: 0.134 loss(reduced): 0.175 grad_norm: 0.10 if_nan_skip: 0 max_memory: 41.2GB text_tokens: 31932.0 tgs: 268 data_time: 0.47s time: 118.86s eta: 16:04:43
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 20:38:51][INFO] [Train] (Epoch 1) Step 108/593 lr: 0.000019 loss: 0.151 loss(reduced): 0.176 grad_norm: 0.09 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 31763.0 tgs: 266 data_time: 0.63s time: 118.97s eta: 16:03:38
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 20:40:53][INFO] [Train] (Epoch 1) Step 109/593 lr: 0.000019 loss: 0.124 loss(reduced): 0.165 grad_norm: 0.09 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 31640.0 tgs: 259 data_time: 0.94s time: 121.85s eta: 16:24:55
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 20:42:52][INFO] [Train] (Epoch 1) Step 110/593 lr: 0.000019 loss: 0.138 loss(reduced): 0.165 grad_norm: 0.11 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 30871.0 tgs: 259 data_time: 0.83s time: 118.98s eta: 15:59:48
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 20:44:51][INFO] [Train] (Epoch 1) Step 111/593 lr: 0.000019 loss: 0.147 loss(reduced): 0.168 grad_norm: 0.09 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 31886.0 tgs: 267 data_time: 0.62s time: 119.22s eta: 15:59:45
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 20:46:50][INFO] [Train] (Epoch 1) Step 112/593 lr: 0.000019 loss: 0.209 loss(reduced): 0.162 grad_norm: 0.11 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 32203.0 tgs: 269 data_time: 0.80s time: 119.62s eta: 16:00:56
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 20:48:49][INFO] [Train] (Epoch 1) Step 113/593 lr: 0.000019 loss: 0.174 loss(reduced): 0.170 grad_norm: 0.15 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 31644.0 tgs: 265 data_time: 0.92s time: 119.04s eta: 15:54:16
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 20:50:49][INFO] [Train] (Epoch 1) Step 114/593 lr: 0.000019 loss: 0.184 loss(reduced): 0.172 grad_norm: 0.12 if_nan_skip: 0 max_memory: 41.2GB text_tokens: 31471.0 tgs: 264 data_time: 0.85s time: 119.00s eta: 15:52:00
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 20:52:48][INFO] [Train] (Epoch 1) Step 115/593 lr: 0.000019 loss: 0.198 loss(reduced): 0.164 grad_norm: 0.11 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 32058.0 tgs: 269 data_time: 0.73s time: 119.15s eta: 15:51:11
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 20:54:47][INFO] [Train] (Epoch 1) Step 116/593 lr: 0.000019 loss: 0.127 loss(reduced): 0.177 grad_norm: 0.19 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 31276.0 tgs: 261 data_time: 0.55s time: 119.64s eta: 15:53:07
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 20:56:46][INFO] [Train] (Epoch 1) Step 117/593 lr: 0.000019 loss: 0.192 loss(reduced): 0.170 grad_norm: 0.13 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 31670.0 tgs: 266 data_time: 0.78s time: 118.93s eta: 15:45:30
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 20:58:46][INFO] [Train] (Epoch 1) Step 118/593 lr: 0.000019 loss: 0.117 loss(reduced): 0.164 grad_norm: 0.12 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 31336.0 tgs: 262 data_time: 0.73s time: 119.29s eta: 15:46:21
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 21:00:45][INFO] [Train] (Epoch 1) Step 119/593 lr: 0.000019 loss: 0.151 loss(reduced): 0.168 grad_norm: 0.13 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 31883.0 tgs: 267 data_time: 0.81s time: 119.13s eta: 15:43:08
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 21:02:44][INFO] [Train] (Epoch 1) Step 120/593 lr: 0.000019 loss: 0.159 loss(reduced): 0.171 grad_norm: 0.10 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 32124.0 tgs: 269 data_time: 0.54s time: 119.00s eta: 15:40:08
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 21:04:43][INFO] [Train] (Epoch 1) Step 121/593 lr: 0.000019 loss: 0.191 loss(reduced): 0.175 grad_norm: 0.15 if_nan_skip: 0 max_memory: 41.1GB text_tokens: 31894.0 tgs: 266 data_time: 1.11s time: 119.66s eta: 15:43:19
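Each step prints both `loss:` (the value computed on this rank) and `loss(reduced):` (evidently an average over the data-parallel group; at step 68, for example, rank 0 logs loss 0.161 against a reduced value of 0.173). One standard way to produce such a reduced value is an all-reduce; this is an assumption about the mechanism, not XTuner's verified implementation.

```python
# Hedged sketch: average a step loss over data-parallel ranks.
# Requires torch.distributed to be initialized (e.g. via torchrun).
import torch
import torch.distributed as dist

def reduced_loss(local_loss: torch.Tensor) -> torch.Tensor:
    buf = local_loss.detach().clone()
    dist.all_reduce(buf, op=dist.ReduceOp.SUM)  # sum across all ranks
    return buf / dist.get_world_size()          # then divide to get the mean
```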
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 21:06:45][INFO] [Train] (Epoch 1) Step 122/593 lr: 0.000019 loss: 0.141 loss(reduced): 0.163 grad_norm: 0.12 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 30979.0 tgs: 254 data_time: 0.60s time: 121.53s eta: 15:56:02
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 21:08:44][INFO] [Train] (Epoch 1) Step 123/593 lr: 0.000019 loss: 0.185 loss(reduced): 0.171 grad_norm: 0.14 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 32020.0 tgs: 268 data_time: 1.00s time: 119.38s eta: 15:37:08
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 21:10:54][INFO] [Train] (Epoch 1) Step 124/593 lr: 0.000019 loss: 0.171 loss(reduced): 0.182 grad_norm: 0.12 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 30678.0 tgs: 236 data_time: 0.70s time: 129.96s eta: 16:58:00
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 21:12:54][INFO] [Train] (Epoch 1) Step 125/593 lr: 0.000019 loss: 0.176 loss(reduced): 0.165 grad_norm: 0.14 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 31688.0 tgs: 265 data_time: 0.84s time: 119.47s eta: 15:33:51
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 21:14:53][INFO] [Train] (Epoch 1) Step 126/593 lr: 0.000019 loss: 0.188 loss(reduced): 0.168 grad_norm: 0.23 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 32488.0 tgs: 271 data_time: 0.61s time: 119.60s eta: 15:32:51
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 21:16:53][INFO] [Train] (Epoch 1) Step 127/593 lr: 0.000019 loss: 0.186 loss(reduced): 0.168 grad_norm: 0.14 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 31712.0 tgs: 265 data_time: 0.71s time: 119.53s eta: 15:30:18
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 21:18:55][INFO] [Train] (Epoch 1) Step 128/593 lr: 0.000019 loss: 0.142 loss(reduced): 0.179 grad_norm: 0.16 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 32353.0 tgs: 264 data_time: 0.82s time: 122.15s eta: 15:48:41
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 21:20:56][INFO] [Train] (Epoch 1) Step 129/593 lr: 0.000019 loss: 0.173 loss(reduced): 0.182 grad_norm: 0.20 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 31371.0 tgs: 259 data_time: 0.67s time: 120.74s eta: 15:35:45
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 21:22:55][INFO] [Train] (Epoch 1) Step 130/593 lr: 0.000019 loss: 0.180 loss(reduced): 0.168 grad_norm: 0.08 if_nan_skip: 0 max_memory: 40.9GB text_tokens: 31162.0 tgs: 260 data_time: 0.80s time: 119.41s eta: 15:23:25
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 21:24:55][INFO] [Train] (Epoch 1) Step 131/593 lr: 0.000019 loss: 0.190 loss(reduced): 0.169 grad_norm: 0.21 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 31792.0 tgs: 266 data_time: 0.60s time: 119.45s eta: 15:21:44
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 21:27:06][INFO] [Train] (Epoch 1) Step 132/593 lr: 0.000019 loss: 0.207 loss(reduced): 0.168 grad_norm: 0.21 if_nan_skip: 0 max_memory: 41.2GB text_tokens: 31707.0 tgs: 240 data_time: 0.77s time: 131.64s eta: 16:53:36
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 21:29:06][INFO] [Train] (Epoch 1) Step 133/593 lr: 0.000019 loss: 0.196 loss(reduced): 0.165 grad_norm: 0.11 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 31151.0 tgs: 261 data_time: 0.62s time: 119.29s eta: 15:16:32
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 21:31:05][INFO] [Train] (Epoch 1) Step 134/593 lr: 0.000019 loss: 0.148 loss(reduced): 0.162 grad_norm: 0.21 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 31976.0 tgs: 267 data_time: 0.70s time: 119.53s eta: 15:16:21
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 21:33:07][INFO] [Train] (Epoch 1) Step 135/593 lr: 0.000019 loss: 0.178 loss(reduced): 0.163 grad_norm: 0.10 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 31284.0 tgs: 255 data_time: 0.80s time: 122.26s eta: 15:35:15
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 21:35:07][INFO] [Train] (Epoch 1) Step 136/593 lr: 0.000019 loss: 0.101 loss(reduced): 0.159 grad_norm: 0.18 if_nan_skip: 0 max_memory: 41.0GB text_tokens: 31315.0 tgs: 261 data_time: 0.58s time: 119.72s eta: 15:13:51
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 21:37:07][INFO] [Train] (Epoch 1) Step 137/593 lr: 0.000019 loss: 0.172 loss(reduced): 0.167 grad_norm: 0.13 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 32406.0 tgs: 271 data_time: 0.79s time: 119.34s eta: 15:08:56
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 21:39:05][INFO] [Train] (Epoch 1) Step 138/593 lr: 0.000018 loss: 0.156 loss(reduced): 0.176 grad_norm: 0.12 if_nan_skip: 0 max_memory: 41.2GB text_tokens: 32148.0 tgs: 270 data_time: 0.96s time: 118.90s eta: 15:03:38
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 21:41:04][INFO] [Train] (Epoch 1) Step 139/593 lr: 0.000018 loss: 0.198 loss(reduced): 0.167 grad_norm: 0.15 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 31696.0 tgs: 266 data_time: 0.68s time: 118.99s eta: 15:02:20
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 21:43:04][INFO] [Train] (Epoch 1) Step 140/593 lr: 0.000018 loss: 0.223 loss(reduced): 0.168 grad_norm: 0.09 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 31056.0 tgs: 260 data_time: 0.68s time: 119.30s eta: 15:02:44
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 21:45:03][INFO] [Train] (Epoch 1) Step 141/593 lr: 0.000018 loss: 0.123 loss(reduced): 0.161 grad_norm: 0.11 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 32069.0 tgs: 269 data_time: 1.01s time: 118.91s eta: 14:57:48
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 21:47:03][INFO] [Train] (Epoch 1) Step 142/593 lr: 0.000018 loss: 0.155 loss(reduced): 0.172 grad_norm: 0.08 if_nan_skip: 0 max_memory: 40.9GB text_tokens: 30466.0 tgs: 252 data_time: 0.58s time: 120.52s eta: 15:07:54
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 21:49:03][INFO] [Train] (Epoch 1) Step 143/593 lr: 0.000018 loss: 0.168 loss(reduced): 0.168 grad_norm: 0.12 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 32374.0 tgs: 271 data_time: 0.77s time: 119.32s eta: 14:56:54
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 21:51:05][INFO] [Train] (Epoch 1) Step 144/593 lr: 0.000018 loss: 0.250 loss(reduced): 0.183 grad_norm: 0.10 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 31709.0 tgs: 258 data_time: 0.86s time: 122.66s eta: 15:19:55
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 21:53:04][INFO] [Train] (Epoch 1) Step 145/593 lr: 0.000018 loss: 0.170 loss(reduced): 0.164 grad_norm: 0.14 if_nan_skip: 0 max_memory: 41.2GB text_tokens: 31008.0 tgs: 260 data_time: 0.77s time: 119.12s eta: 14:51:26
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 21:55:04][INFO] [Train] (Epoch 1) Step 146/593 lr: 0.000018 loss: 0.139 loss(reduced): 0.168 grad_norm: 0.10 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 30353.0 tgs: 253 data_time: 0.91s time: 119.75s eta: 14:54:06
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 21:57:04][INFO] [Train] (Epoch 1) Step 147/593 lr: 0.000018 loss: 0.174 loss(reduced): 0.165 grad_norm: 0.10 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 32000.0 tgs: 267 data_time: 0.73s time: 119.47s eta: 14:50:02
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 21:59:03][INFO] [Train] (Epoch 1) Step 148/593 lr: 0.000018 loss: 0.212 loss(reduced): 0.177 grad_norm: 0.09 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 31426.0 tgs: 264 data_time: 0.78s time: 118.98s eta: 14:44:26
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 22:01:02][INFO] [Train] (Epoch 1) Step 149/593 lr: 0.000018 loss: 0.158 loss(reduced): 0.162 grad_norm: 0.09 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 31813.0 tgs: 266 data_time: 0.66s time: 119.36s eta: 14:45:13
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 22:03:01][INFO] [Train] (Epoch 1) Step 150/593 lr: 0.000018 loss: 0.247 loss(reduced): 0.162 grad_norm: 0.11 if_nan_skip: 0 max_memory: 41.2GB text_tokens: 31563.0 tgs: 265 data_time: 0.77s time: 118.97s eta: 14:40:21
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 22:05:00][INFO] [Train] (Epoch 1) Step 151/593 lr: 0.000018 loss: 0.171 loss(reduced): 0.169 grad_norm: 0.08 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 32232.0 tgs: 270 data_time: 0.91s time: 119.03s eta: 14:38:48
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 22:06:59][INFO] [Train] (Epoch 1) Step 152/593 lr: 0.000018 loss: 0.177 loss(reduced): 0.166 grad_norm: 0.09 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 31431.0 tgs: 263 data_time: 0.97s time: 119.06s eta: 14:37:04
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 22:08:58][INFO] [Train] (Epoch 1) Step 153/593 lr: 0.000018 loss: 0.155 loss(reduced): 0.162 grad_norm: 0.10 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 31648.0 tgs: 265 data_time: 0.92s time: 119.13s eta: 14:35:35
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 22:10:57][INFO] [Train] (Epoch 1) Step 154/593 lr: 0.000018 loss: 0.198 loss(reduced): 0.159 grad_norm: 0.13 if_nan_skip: 0 max_memory: 41.1GB text_tokens: 30483.0 tgs: 256 data_time: 0.65s time: 119.00s eta: 14:32:38
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 22:12:56][INFO] [Train] (Epoch 1) Step 155/593 lr: 0.000018 loss: 0.173 loss(reduced): 0.167 grad_norm: 0.11 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 31386.0 tgs: 263 data_time: 0.71s time: 119.07s eta: 14:31:12
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 22:14:55][INFO] [Train] (Epoch 1) Step 156/593 lr: 0.000018 loss: 0.133 loss(reduced): 0.166 grad_norm: 0.10 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 32018.0 tgs: 268 data_time: 0.85s time: 119.14s eta: 14:29:45
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 22:16:55][INFO] [Train] (Epoch 1) Step 157/593 lr: 0.000018 loss: 0.175 loss(reduced): 0.166 grad_norm: 0.12 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 32164.0 tgs: 268 data_time: 0.89s time: 119.65s eta: 14:31:27
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 22:18:54][INFO] [Train] (Epoch 1) Step 158/593 lr: 0.000018 loss: 0.179 loss(reduced): 0.167 grad_norm: 0.09 if_nan_skip: 0 max_memory: 41.0GB text_tokens: 31083.0 tgs: 260 data_time: 0.66s time: 119.17s eta: 14:25:59
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 22:20:53][INFO] [Train] (Epoch 1) Step 159/593 lr: 0.000018 loss: 0.214 loss(reduced): 0.169 grad_norm: 0.09 if_nan_skip: 0 max_memory: 41.2GB text_tokens: 31336.0 tgs: 262 data_time: 0.95s time: 119.20s eta: 14:24:10
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 22:22:53][INFO] [Train] (Epoch 1) Step 160/593 lr: 0.000018 loss: 0.121 loss(reduced): 0.156 grad_norm: 0.09 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 32214.0 tgs: 269 data_time: 0.83s time: 119.39s eta: 14:23:34
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 22:24:54][INFO] [Train] (Epoch 1) Step 161/593 lr: 0.000018 loss: 0.134 loss(reduced): 0.169 grad_norm: 0.09 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 32099.0 tgs: 265 data_time: 0.90s time: 120.74s eta: 14:31:20
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 22:26:52][INFO] [Train] (Epoch 1) Step 162/593 lr: 0.000018 loss: 0.171 loss(reduced): 0.165 grad_norm: 0.10 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 31602.0 tgs: 265 data_time: 0.80s time: 118.88s eta: 14:15:54
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 22:28:52][INFO] [Train] (Epoch 1) Step 163/593 lr: 0.000018 loss: 0.154 loss(reduced): 0.169 grad_norm: 0.09 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 31309.0 tgs: 262 data_time: 0.88s time: 119.10s eta: 14:15:31
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 22:30:51][INFO] [Train] (Epoch 1) Step 164/593 lr: 0.000018 loss: 0.135 loss(reduced): 0.161 grad_norm: 0.10 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 32032.0 tgs: 268 data_time: 0.59s time: 119.13s eta: 14:13:47
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 22:32:51][INFO] [Train] (Epoch 1) Step 165/593 lr: 0.000018 loss: 0.178 loss(reduced): 0.179 grad_norm: 0.10 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 31071.0 tgs: 259 data_time: 0.46s time: 119.95s eta: 14:17:37
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 22:34:51][INFO] [Train] (Epoch 1) Step 166/593 lr: 0.000018 loss: 0.172 loss(reduced): 0.166 grad_norm: 0.11 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 32041.0 tgs: 267 data_time: 1.22s time: 119.87s eta: 14:15:02
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 22:36:50][INFO] [Train] (Epoch 1) Step 167/593 lr: 0.000018 loss: 0.286 loss(reduced): 0.175 grad_norm: 0.09 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 32454.0 tgs: 271 data_time: 0.71s time: 119.58s eta: 14:11:02
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 22:38:49][INFO] [Train] (Epoch 1) Step 168/593 lr: 0.000018 loss: 0.150 loss(reduced): 0.160 grad_norm: 0.09 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 31188.0 tgs: 261 data_time: 0.74s time: 119.21s eta: 14:06:25
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 22:40:49][INFO] [Train] (Epoch 1) Step 169/593 lr: 0.000018 loss: 0.150 loss(reduced): 0.157 grad_norm: 0.10 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 31774.0 tgs: 266 data_time: 0.55s time: 119.27s eta: 14:04:48
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 22:42:48][INFO] [Train] (Epoch 1) Step 170/593 lr: 0.000018 loss: 0.221 loss(reduced): 0.170 grad_norm: 0.09 if_nan_skip: 0 max_memory: 41.1GB text_tokens: 31335.0 tgs: 262 data_time: 0.76s time: 119.26s eta: 14:02:46
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 22:44:47][INFO] [Train] (Epoch 1) Step 171/593 lr: 0.000018 loss: 0.159 loss(reduced): 0.166 grad_norm: 0.12 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 30880.0 tgs: 259 data_time: 0.63s time: 119.14s eta: 13:59:57
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 22:46:46][INFO] [Train] (Epoch 1) Step 172/593 lr: 0.000018 loss: 0.134 loss(reduced): 0.161 grad_norm: 0.10 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 31463.0 tgs: 264 data_time: 0.69s time: 119.13s eta: 13:57:53
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 22:48:45][INFO] [Train] (Epoch 1) Step 173/593 lr: 0.000018 loss: 0.142 loss(reduced): 0.164 grad_norm: 0.10 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 31792.0 tgs: 266 data_time: 0.78s time: 119.16s eta: 13:56:06
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 22:50:45][INFO] [Train] (Epoch 1) Step 174/593 lr: 0.000018 loss: 0.182 loss(reduced): 0.166 grad_norm: 0.08 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 31736.0 tgs: 265 data_time: 0.58s time: 119.34s eta: 13:55:24
+[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 22:52:45][INFO] [Train] (Epoch 1) Step 175/593 lr: 0.000018 loss: 0.186 loss(reduced): 0.173
grad_norm: 0.13 if_nan_skip: 0 max_memory: 41.0GB text_tokens: 31332.0 tgs: 260 data_time: 0.51s time: 120.11s eta: 13:58:45 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 22:54:47][INFO] [Train] (Epoch 1) Step 176/593 lr: 0.000017 loss: 0.220 loss(reduced): 0.164 grad_norm: 0.12 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 31769.0 tgs: 259 data_time: 0.87s time: 122.56s eta: 14:13:50 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 22:56:47][INFO] [Train] (Epoch 1) Step 177/593 lr: 0.000017 loss: 0.108 loss(reduced): 0.165 grad_norm: 0.10 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 30464.0 tgs: 255 data_time: 0.54s time: 119.11s eta: 13:47:47 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 22:58:46][INFO] [Train] (Epoch 1) Step 178/593 lr: 0.000017 loss: 0.136 loss(reduced): 0.170 grad_norm: 0.13 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 31971.0 tgs: 267 data_time: 0.98s time: 119.36s eta: 13:47:33 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 23:00:45][INFO] [Train] (Epoch 1) Step 179/593 lr: 0.000017 loss: 0.164 loss(reduced): 0.159 grad_norm: 0.14 if_nan_skip: 0 max_memory: 41.0GB text_tokens: 31019.0 tgs: 259 data_time: 0.87s time: 119.38s eta: 13:45:42 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 23:02:45][INFO] [Train] (Epoch 1) Step 180/593 lr: 0.000017 loss: 0.154 loss(reduced): 0.166 grad_norm: 0.18 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 31545.0 tgs: 264 data_time: 0.90s time: 119.32s eta: 13:43:16 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 23:04:44][INFO] [Train] (Epoch 1) Step 181/593 lr: 0.000017 loss: 0.124 loss(reduced): 0.161 grad_norm: 0.14 if_nan_skip: 0 max_memory: 40.9GB text_tokens: 30210.0 tgs: 253 data_time: 0.74s time: 119.12s eta: 13:39:55 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 23:06:43][INFO] [Train] (Epoch 1) Step 182/593 lr: 0.000017 loss: 0.119 loss(reduced): 0.157 grad_norm: 0.16 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 31402.0 tgs: 263 data_time: 0.66s time: 119.26s eta: 13:38:56 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 23:08:42][INFO] [Train] (Epoch 1) Step 183/593 lr: 0.000017 loss: 0.127 loss(reduced): 0.168 grad_norm: 0.09 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 30979.0 tgs: 260 data_time: 0.77s time: 119.13s eta: 13:36:01 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 23:10:41][INFO] [Train] (Epoch 1) Step 184/593 lr: 0.000017 loss: 0.173 loss(reduced): 0.171 grad_norm: 0.16 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 31924.0 tgs: 267 data_time: 0.64s time: 119.22s eta: 13:34:39 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 23:12:41][INFO] [Train] (Epoch 1) Step 185/593 lr: 0.000017 loss: 0.132 loss(reduced): 0.165 grad_norm: 0.18 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 31521.0 tgs: 263 data_time: 0.82s time: 119.46s eta: 13:34:17 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 23:14:40][INFO] [Train] (Epoch 1) Step 186/593 lr: 0.000017 loss: 0.228 loss(reduced): 0.163 grad_norm: 0.14 if_nan_skip: 0 max_memory: 40.8GB text_tokens: 30756.0 tgs: 258 data_time: 1.03s time: 119.09s eta: 13:29:47 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 23:16:39][INFO] [Train] (Epoch 1) Step 187/593 lr: 0.000017 loss: 0.215 loss(reduced): 0.166 grad_norm: 0.18 if_nan_skip: 0 max_memory: 40.8GB text_tokens: 30239.0 tgs: 254 data_time: 0.79s time: 118.92s eta: 13:26:39 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 23:18:39][INFO] [Train] (Epoch 1) Step 188/593 lr: 0.000017 loss: 0.147 loss(reduced): 0.163 grad_norm: 0.19 if_nan_skip: 0 max_memory: 41.1GB text_tokens: 31540.0 tgs: 263 data_time: 0.62s time: 
119.81s eta: 13:30:42 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 23:20:38][INFO] [Train] (Epoch 1) Step 189/593 lr: 0.000017 loss: 0.177 loss(reduced): 0.165 grad_norm: 0.12 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 32292.0 tgs: 270 data_time: 0.89s time: 119.25s eta: 13:24:54 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 23:22:37][INFO] [Train] (Epoch 1) Step 190/593 lr: 0.000017 loss: 0.174 loss(reduced): 0.168 grad_norm: 0.19 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 31612.0 tgs: 265 data_time: 0.79s time: 119.16s eta: 13:22:20 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 23:24:36][INFO] [Train] (Epoch 1) Step 191/593 lr: 0.000017 loss: 0.162 loss(reduced): 0.164 grad_norm: 0.14 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 32274.0 tgs: 270 data_time: 0.77s time: 119.35s eta: 13:21:39 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 23:26:36][INFO] [Train] (Epoch 1) Step 192/593 lr: 0.000017 loss: 0.170 loss(reduced): 0.157 grad_norm: 0.14 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 32015.0 tgs: 268 data_time: 0.52s time: 119.26s eta: 13:19:00 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 23:28:35][INFO] [Train] (Epoch 1) Step 193/593 lr: 0.000017 loss: 0.181 loss(reduced): 0.165 grad_norm: 0.16 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 32346.0 tgs: 271 data_time: 0.74s time: 119.04s eta: 13:15:34 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 23:30:34][INFO] [Train] (Epoch 1) Step 194/593 lr: 0.000017 loss: 0.274 loss(reduced): 0.169 grad_norm: 0.14 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 32171.0 tgs: 269 data_time: 0.93s time: 119.37s eta: 13:15:49 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 23:32:33][INFO] [Train] (Epoch 1) Step 195/593 lr: 0.000017 loss: 0.165 loss(reduced): 0.159 grad_norm: 0.14 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 31963.0 tgs: 268 data_time: 0.89s time: 119.19s eta: 13:12:36 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 23:34:33][INFO] [Train] (Epoch 1) Step 196/593 lr: 0.000017 loss: 0.121 loss(reduced): 0.167 grad_norm: 0.16 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 31708.0 tgs: 265 data_time: 0.84s time: 119.46s eta: 13:12:23 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 23:36:32][INFO] [Train] (Epoch 1) Step 197/593 lr: 0.000017 loss: 0.178 loss(reduced): 0.166 grad_norm: 0.11 if_nan_skip: 0 max_memory: 41.2GB text_tokens: 32162.0 tgs: 269 data_time: 0.75s time: 119.29s eta: 13:09:16 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 23:38:31][INFO] [Train] (Epoch 1) Step 198/593 lr: 0.000017 loss: 0.174 loss(reduced): 0.162 grad_norm: 0.13 if_nan_skip: 0 max_memory: 41.2GB text_tokens: 31971.0 tgs: 268 data_time: 0.64s time: 119.14s eta: 13:06:20 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 23:40:33][INFO] [Train] (Epoch 1) Step 199/593 lr: 0.000017 loss: 0.211 loss(reduced): 0.172 grad_norm: 0.12 if_nan_skip: 0 max_memory: 40.8GB text_tokens: 31170.0 tgs: 260 data_time: 0.49s time: 119.80s eta: 13:08:41 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 23:42:33][INFO] [Train] (Epoch 1) Step 200/593 lr: 0.000017 loss: 0.113 loss(reduced): 0.160 grad_norm: 0.12 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 32008.0 tgs: 266 data_time: 0.71s time: 120.16s eta: 13:09:02 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 23:44:33][INFO] [Train] (Epoch 1) Step 201/593 lr: 0.000017 loss: 0.155 loss(reduced): 0.167 grad_norm: 0.10 if_nan_skip: 0 max_memory: 41.2GB text_tokens: 31146.0 tgs: 260 data_time: 0.75s time: 119.50s eta: 13:02:43 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 23:46:32][INFO] [Train] (Epoch 1) Step 
202/593 lr: 0.000017 loss: 0.137 loss(reduced): 0.160 grad_norm: 0.09 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 31102.0 tgs: 260 data_time: 0.62s time: 119.50s eta: 13:00:45 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 23:48:32][INFO] [Train] (Epoch 1) Step 203/593 lr: 0.000017 loss: 0.139 loss(reduced): 0.166 grad_norm: 0.10 if_nan_skip: 0 max_memory: 41.2GB text_tokens: 31646.0 tgs: 264 data_time: 0.81s time: 119.58s eta: 12:59:13 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 23:50:32][INFO] [Train] (Epoch 1) Step 204/593 lr: 0.000017 loss: 0.176 loss(reduced): 0.167 grad_norm: 0.09 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 31660.0 tgs: 264 data_time: 0.76s time: 119.80s eta: 12:58:40 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 23:52:31][INFO] [Train] (Epoch 1) Step 205/593 lr: 0.000017 loss: 0.114 loss(reduced): 0.165 grad_norm: 0.08 if_nan_skip: 0 max_memory: 41.1GB text_tokens: 30304.0 tgs: 254 data_time: 0.48s time: 119.03s eta: 12:51:44 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 23:54:30][INFO] [Train] (Epoch 1) Step 206/593 lr: 0.000017 loss: 0.129 loss(reduced): 0.161 grad_norm: 0.08 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 32097.0 tgs: 269 data_time: 0.81s time: 119.05s eta: 12:49:49 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 23:56:29][INFO] [Train] (Epoch 1) Step 207/593 lr: 0.000017 loss: 0.159 loss(reduced): 0.164 grad_norm: 0.08 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 31600.0 tgs: 265 data_time: 0.79s time: 119.12s eta: 12:48:21 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-21 23:58:29][INFO] [Train] (Epoch 1) Step 208/593 lr: 0.000016 loss: 0.120 loss(reduced): 0.159 grad_norm: 0.08 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 31309.0 tgs: 262 data_time: 0.75s time: 119.40s eta: 12:48:07 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 00:00:28][INFO] [Train] (Epoch 1) Step 209/593 lr: 0.000016 loss: 0.139 loss(reduced): 0.161 grad_norm: 0.08 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 32114.0 tgs: 269 data_time: 0.92s time: 119.19s eta: 12:44:49 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 00:02:27][INFO] [Train] (Epoch 1) Step 210/593 lr: 0.000016 loss: 0.181 loss(reduced): 0.165 grad_norm: 0.08 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 31183.0 tgs: 261 data_time: 0.84s time: 119.46s eta: 12:44:33 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 00:04:26][INFO] [Train] (Epoch 1) Step 211/593 lr: 0.000016 loss: 0.133 loss(reduced): 0.164 grad_norm: 0.08 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 32329.0 tgs: 271 data_time: 0.61s time: 119.08s eta: 12:40:06 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 00:06:25][INFO] [Train] (Epoch 1) Step 212/593 lr: 0.000016 loss: 0.169 loss(reduced): 0.165 grad_norm: 0.09 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 31908.0 tgs: 267 data_time: 0.51s time: 119.07s eta: 12:38:02 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 00:08:25][INFO] [Train] (Epoch 1) Step 213/593 lr: 0.000016 loss: 0.150 loss(reduced): 0.162 grad_norm: 0.10 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 31459.0 tgs: 263 data_time: 0.87s time: 119.59s eta: 12:39:24 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 00:10:24][INFO] [Train] (Epoch 1) Step 214/593 lr: 0.000016 loss: 0.214 loss(reduced): 0.158 grad_norm: 0.09 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 32188.0 tgs: 270 data_time: 0.84s time: 118.78s eta: 12:32:17 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 00:12:23][INFO] [Train] (Epoch 1) Step 215/593 lr: 0.000016 loss: 0.152 loss(reduced): 0.161 grad_norm: 0.09 if_nan_skip: 0 max_memory: 41.4GB 
text_tokens: 32025.0 tgs: 269 data_time: 0.92s time: 118.88s eta: 12:30:55 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 00:14:22][INFO] [Train] (Epoch 1) Step 216/593 lr: 0.000016 loss: 0.135 loss(reduced): 0.166 grad_norm: 0.09 if_nan_skip: 0 max_memory: 41.1GB text_tokens: 31597.0 tgs: 265 data_time: 0.79s time: 119.17s eta: 12:30:46 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 00:16:21][INFO] [Train] (Epoch 1) Step 217/593 lr: 0.000016 loss: 0.189 loss(reduced): 0.152 grad_norm: 0.10 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 31743.0 tgs: 266 data_time: 0.64s time: 119.26s eta: 12:29:21 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 00:18:20][INFO] [Train] (Epoch 1) Step 218/593 lr: 0.000016 loss: 0.162 loss(reduced): 0.174 grad_norm: 0.08 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 31466.0 tgs: 263 data_time: 0.54s time: 119.34s eta: 12:27:53 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 00:20:20][INFO] [Train] (Epoch 1) Step 219/593 lr: 0.000016 loss: 0.168 loss(reduced): 0.158 grad_norm: 0.10 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 32399.0 tgs: 271 data_time: 0.57s time: 119.13s eta: 12:24:33 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 00:22:19][INFO] [Train] (Epoch 1) Step 220/593 lr: 0.000016 loss: 0.152 loss(reduced): 0.163 grad_norm: 0.08 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 32066.0 tgs: 268 data_time: 0.64s time: 119.34s eta: 12:23:51 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 00:24:18][INFO] [Train] (Epoch 1) Step 221/593 lr: 0.000016 loss: 0.165 loss(reduced): 0.160 grad_norm: 0.10 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 32477.0 tgs: 272 data_time: 0.77s time: 119.28s eta: 12:21:32 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 00:26:17][INFO] [Train] (Epoch 1) Step 222/593 lr: 0.000016 loss: 0.166 loss(reduced): 0.152 grad_norm: 0.10 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 32311.0 tgs: 271 data_time: 0.74s time: 119.00s eta: 12:17:49 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 00:28:18][INFO] [Train] (Epoch 1) Step 223/593 lr: 0.000016 loss: 0.137 loss(reduced): 0.165 grad_norm: 0.10 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 31939.0 tgs: 263 data_time: 1.09s time: 121.13s eta: 12:28:59 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 00:30:18][INFO] [Train] (Epoch 1) Step 224/593 lr: 0.000016 loss: 0.163 loss(reduced): 0.161 grad_norm: 0.11 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 31894.0 tgs: 265 data_time: 0.67s time: 119.98s eta: 12:19:52 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 00:32:17][INFO] [Train] (Epoch 1) Step 225/593 lr: 0.000016 loss: 0.189 loss(reduced): 0.162 grad_norm: 0.08 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 31769.0 tgs: 267 data_time: 0.76s time: 118.72s eta: 12:10:09 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 00:34:17][INFO] [Train] (Epoch 1) Step 226/593 lr: 0.000016 loss: 0.133 loss(reduced): 0.173 grad_norm: 0.08 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 32348.0 tgs: 270 data_time: 0.68s time: 119.63s eta: 12:13:42 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 00:36:17][INFO] [Train] (Epoch 1) Step 227/593 lr: 0.000016 loss: 0.145 loss(reduced): 0.152 grad_norm: 0.10 if_nan_skip: 0 max_memory: 41.0GB text_tokens: 31013.0 tgs: 258 data_time: 0.75s time: 119.83s eta: 12:12:58 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 00:38:16][INFO] [Train] (Epoch 1) Step 228/593 lr: 0.000016 loss: 0.172 loss(reduced): 0.156 grad_norm: 0.09 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 31535.0 tgs: 264 data_time: 0.81s time: 119.14s eta: 12:06:43 +[XTuner][RANK 0][DP 0][SP 0][TP 
0][2025-01-22 00:40:15][INFO] [Train] (Epoch 1) Step 229/593 lr: 0.000016 loss: 0.260 loss(reduced): 0.167 grad_norm: 0.08 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 31858.0 tgs: 267 data_time: 0.92s time: 119.17s eta: 12:04:57 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 00:42:14][INFO] [Train] (Epoch 1) Step 230/593 lr: 0.000016 loss: 0.177 loss(reduced): 0.173 grad_norm: 0.08 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 31812.0 tgs: 267 data_time: 0.93s time: 119.12s eta: 12:02:38 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 00:44:13][INFO] [Train] (Epoch 1) Step 231/593 lr: 0.000016 loss: 0.130 loss(reduced): 0.162 grad_norm: 0.08 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 32180.0 tgs: 269 data_time: 0.57s time: 119.21s eta: 12:01:13 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 00:46:12][INFO] [Train] (Epoch 1) Step 232/593 lr: 0.000016 loss: 0.129 loss(reduced): 0.160 grad_norm: 0.09 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 31793.0 tgs: 267 data_time: 0.66s time: 118.92s eta: 11:57:30 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 00:48:12][INFO] [Train] (Epoch 1) Step 233/593 lr: 0.000016 loss: 0.172 loss(reduced): 0.159 grad_norm: 0.09 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 31540.0 tgs: 263 data_time: 0.81s time: 119.72s eta: 12:00:19 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 00:50:11][INFO] [Train] (Epoch 1) Step 234/593 lr: 0.000016 loss: 0.128 loss(reduced): 0.169 grad_norm: 0.10 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 30544.0 tgs: 256 data_time: 0.81s time: 119.29s eta: 11:55:46 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 00:52:10][INFO] [Train] (Epoch 1) Step 235/593 lr: 0.000016 loss: 0.121 loss(reduced): 0.168 grad_norm: 0.10 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 32314.0 tgs: 270 data_time: 0.59s time: 119.24s eta: 11:53:28 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 00:54:10][INFO] [Train] (Epoch 1) Step 236/593 lr: 0.000016 loss: 0.137 loss(reduced): 0.163 grad_norm: 0.08 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 32245.0 tgs: 269 data_time: 0.78s time: 119.71s eta: 11:54:17 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 00:56:09][INFO] [Train] (Epoch 1) Step 237/593 lr: 0.000016 loss: 0.160 loss(reduced): 0.165 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 31701.0 tgs: 266 data_time: 0.87s time: 118.77s eta: 11:46:40 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 00:58:08][INFO] [Train] (Epoch 1) Step 238/593 lr: 0.000015 loss: 0.146 loss(reduced): 0.161 grad_norm: 0.08 if_nan_skip: 0 max_memory: 41.1GB text_tokens: 31096.0 tgs: 260 data_time: 0.54s time: 119.16s eta: 11:47:01 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 01:00:08][INFO] [Train] (Epoch 1) Step 239/593 lr: 0.000015 loss: 0.139 loss(reduced): 0.159 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 32280.0 tgs: 269 data_time: 0.67s time: 119.60s eta: 11:47:39 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 01:02:07][INFO] [Train] (Epoch 1) Step 240/593 lr: 0.000015 loss: 0.145 loss(reduced): 0.161 grad_norm: 0.08 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 31969.0 tgs: 268 data_time: 0.69s time: 118.87s eta: 11:41:21 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 01:04:05][INFO] [Train] (Epoch 1) Step 241/593 lr: 0.000015 loss: 0.123 loss(reduced): 0.155 grad_norm: 0.09 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 30453.0 tgs: 256 data_time: 0.63s time: 118.69s eta: 11:38:17 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 01:06:05][INFO] [Train] (Epoch 1) Step 242/593 lr: 0.000015 loss: 0.158 loss(reduced): 0.162 
grad_norm: 0.09 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 32281.0 tgs: 270 data_time: 1.10s time: 119.23s eta: 11:39:29 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 01:08:04][INFO] [Train] (Epoch 1) Step 243/593 lr: 0.000015 loss: 0.129 loss(reduced): 0.162 grad_norm: 0.38 if_nan_skip: 0 max_memory: 41.0GB text_tokens: 31590.0 tgs: 265 data_time: 0.87s time: 119.12s eta: 11:36:50 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 01:10:03][INFO] [Train] (Epoch 1) Step 244/593 lr: 0.000015 loss: 0.149 loss(reduced): 0.162 grad_norm: 0.10 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 32210.0 tgs: 270 data_time: 0.60s time: 119.07s eta: 11:34:34 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 01:12:02][INFO] [Train] (Epoch 1) Step 245/593 lr: 0.000015 loss: 0.200 loss(reduced): 0.157 grad_norm: 0.08 if_nan_skip: 0 max_memory: 41.2GB text_tokens: 31267.0 tgs: 262 data_time: 0.98s time: 118.94s eta: 11:31:51 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 01:14:01][INFO] [Train] (Epoch 1) Step 246/593 lr: 0.000015 loss: 0.164 loss(reduced): 0.165 grad_norm: 0.10 if_nan_skip: 0 max_memory: 41.1GB text_tokens: 31652.0 tgs: 266 data_time: 0.63s time: 118.91s eta: 11:29:41 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 01:16:00][INFO] [Train] (Epoch 1) Step 247/593 lr: 0.000015 loss: 0.128 loss(reduced): 0.162 grad_norm: 0.12 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 31147.0 tgs: 261 data_time: 0.73s time: 118.97s eta: 11:28:03 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 01:17:59][INFO] [Train] (Epoch 1) Step 248/593 lr: 0.000015 loss: 0.171 loss(reduced): 0.156 grad_norm: 0.13 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 31720.0 tgs: 265 data_time: 0.77s time: 119.27s eta: 11:27:48 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 01:19:58][INFO] [Train] (Epoch 1) Step 249/593 lr: 0.000015 loss: 0.186 loss(reduced): 0.158 grad_norm: 0.12 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 31912.0 tgs: 267 data_time: 0.73s time: 119.36s eta: 11:26:18 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 01:21:57][INFO] [Train] (Epoch 1) Step 250/593 lr: 0.000015 loss: 0.202 loss(reduced): 0.165 grad_norm: 0.08 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 31545.0 tgs: 265 data_time: 0.90s time: 118.87s eta: 11:21:32 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 01:23:56][INFO] [Train] (Epoch 1) Step 251/593 lr: 0.000015 loss: 0.162 loss(reduced): 0.159 grad_norm: 0.10 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 31324.0 tgs: 262 data_time: 0.96s time: 119.20s eta: 11:21:27 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 01:25:56][INFO] [Train] (Epoch 1) Step 252/593 lr: 0.000015 loss: 0.161 loss(reduced): 0.160 grad_norm: 0.10 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 31645.0 tgs: 264 data_time: 0.64s time: 119.72s eta: 11:22:25 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 01:27:55][INFO] [Train] (Epoch 1) Step 253/593 lr: 0.000015 loss: 0.135 loss(reduced): 0.167 grad_norm: 0.09 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 30615.0 tgs: 257 data_time: 0.69s time: 119.11s eta: 11:16:56 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 01:29:54][INFO] [Train] (Epoch 1) Step 254/593 lr: 0.000015 loss: 0.144 loss(reduced): 0.154 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 31424.0 tgs: 263 data_time: 0.71s time: 119.12s eta: 11:15:01 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 01:31:53][INFO] [Train] (Epoch 1) Step 255/593 lr: 0.000015 loss: 0.112 loss(reduced): 0.165 grad_norm: 0.08 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 30803.0 tgs: 258 data_time: 0.79s time: 
118.99s eta: 11:12:17 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 01:33:52][INFO] [Train] (Epoch 1) Step 256/593 lr: 0.000015 loss: 0.122 loss(reduced): 0.159 grad_norm: 0.08 if_nan_skip: 0 max_memory: 41.2GB text_tokens: 31452.0 tgs: 264 data_time: 0.55s time: 119.09s eta: 11:10:51 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 01:35:52][INFO] [Train] (Epoch 1) Step 257/593 lr: 0.000015 loss: 0.122 loss(reduced): 0.163 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 31625.0 tgs: 265 data_time: 0.60s time: 119.18s eta: 11:09:25 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 01:37:51][INFO] [Train] (Epoch 1) Step 258/593 lr: 0.000015 loss: 0.165 loss(reduced): 0.165 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 31708.0 tgs: 266 data_time: 0.88s time: 118.95s eta: 11:06:06 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 01:39:50][INFO] [Train] (Epoch 1) Step 259/593 lr: 0.000015 loss: 0.132 loss(reduced): 0.166 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.2GB text_tokens: 32086.0 tgs: 268 data_time: 0.79s time: 119.46s eta: 11:07:00 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 01:41:50][INFO] [Train] (Epoch 1) Step 260/593 lr: 0.000015 loss: 0.132 loss(reduced): 0.165 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 31642.0 tgs: 264 data_time: 1.22s time: 119.64s eta: 11:06:00 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 01:43:49][INFO] [Train] (Epoch 1) Step 261/593 lr: 0.000015 loss: 0.195 loss(reduced): 0.157 grad_norm: 0.08 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 32189.0 tgs: 269 data_time: 0.94s time: 119.31s eta: 11:02:09 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 01:45:48][INFO] [Train] (Epoch 1) Step 262/593 lr: 0.000015 loss: 0.219 loss(reduced): 0.161 grad_norm: 0.09 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 32250.0 tgs: 270 data_time: 0.98s time: 119.30s eta: 11:00:08 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 01:47:47][INFO] [Train] (Epoch 1) Step 263/593 lr: 0.000015 loss: 0.123 loss(reduced): 0.160 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.1GB text_tokens: 31968.0 tgs: 269 data_time: 0.72s time: 118.82s eta: 10:55:29 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 01:49:47][INFO] [Train] (Epoch 1) Step 264/593 lr: 0.000015 loss: 0.186 loss(reduced): 0.162 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 31910.0 tgs: 267 data_time: 0.58s time: 119.48s eta: 10:57:06 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 01:51:46][INFO] [Train] (Epoch 1) Step 265/593 lr: 0.000014 loss: 0.218 loss(reduced): 0.167 grad_norm: 0.08 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 32338.0 tgs: 270 data_time: 0.85s time: 119.40s eta: 10:54:40 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 01:53:45][INFO] [Train] (Epoch 1) Step 266/593 lr: 0.000014 loss: 0.130 loss(reduced): 0.155 grad_norm: 0.09 if_nan_skip: 0 max_memory: 41.2GB text_tokens: 31889.0 tgs: 267 data_time: 0.86s time: 119.25s eta: 10:51:54 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 01:55:45][INFO] [Train] (Epoch 1) Step 267/593 lr: 0.000014 loss: 0.187 loss(reduced): 0.156 grad_norm: 0.11 if_nan_skip: 0 max_memory: 41.2GB text_tokens: 31414.0 tgs: 263 data_time: 0.71s time: 119.25s eta: 10:49:54 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 01:57:44][INFO] [Train] (Epoch 1) Step 268/593 lr: 0.000014 loss: 0.191 loss(reduced): 0.161 grad_norm: 0.08 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 32270.0 tgs: 270 data_time: 0.80s time: 119.11s eta: 10:47:09 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 01:59:43][INFO] [Train] (Epoch 1) Step 
269/593 lr: 0.000014 loss: 0.163 loss(reduced): 0.158 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 31696.0 tgs: 265 data_time: 0.62s time: 119.52s eta: 10:47:23 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 02:01:43][INFO] [Train] (Epoch 1) Step 270/593 lr: 0.000014 loss: 0.149 loss(reduced): 0.157 grad_norm: 0.10 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 31870.0 tgs: 267 data_time: 0.66s time: 119.34s eta: 10:44:25 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 02:03:42][INFO] [Train] (Epoch 1) Step 271/593 lr: 0.000014 loss: 0.150 loss(reduced): 0.162 grad_norm: 0.09 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 31715.0 tgs: 265 data_time: 0.91s time: 119.48s eta: 10:43:13 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 02:05:42][INFO] [Train] (Epoch 1) Step 272/593 lr: 0.000014 loss: 0.151 loss(reduced): 0.162 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 31985.0 tgs: 267 data_time: 0.77s time: 119.51s eta: 10:41:22 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 02:07:41][INFO] [Train] (Epoch 1) Step 273/593 lr: 0.000014 loss: 0.173 loss(reduced): 0.165 grad_norm: 0.07 if_nan_skip: 0 max_memory: 40.8GB text_tokens: 31170.0 tgs: 261 data_time: 0.92s time: 118.99s eta: 10:36:37 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 02:09:40][INFO] [Train] (Epoch 1) Step 274/593 lr: 0.000014 loss: 0.161 loss(reduced): 0.160 grad_norm: 0.09 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 32105.0 tgs: 269 data_time: 0.57s time: 119.21s eta: 10:35:48 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 02:11:39][INFO] [Train] (Epoch 1) Step 275/593 lr: 0.000014 loss: 0.167 loss(reduced): 0.165 grad_norm: 0.09 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 31743.0 tgs: 266 data_time: 0.82s time: 119.05s eta: 10:32:58 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 02:13:38][INFO] [Train] (Epoch 1) Step 276/593 lr: 0.000014 loss: 0.166 loss(reduced): 0.163 grad_norm: 0.08 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 31441.0 tgs: 264 data_time: 0.83s time: 118.79s eta: 10:29:36 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 02:15:38][INFO] [Train] (Epoch 1) Step 277/593 lr: 0.000014 loss: 0.154 loss(reduced): 0.159 grad_norm: 0.08 if_nan_skip: 0 max_memory: 41.2GB text_tokens: 31695.0 tgs: 264 data_time: 0.75s time: 119.85s eta: 10:33:11 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 02:17:37][INFO] [Train] (Epoch 1) Step 278/593 lr: 0.000014 loss: 0.278 loss(reduced): 0.165 grad_norm: 0.11 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 32418.0 tgs: 271 data_time: 0.57s time: 119.42s eta: 10:28:55 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 02:19:36][INFO] [Train] (Epoch 1) Step 279/593 lr: 0.000014 loss: 0.179 loss(reduced): 0.163 grad_norm: 0.13 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 31712.0 tgs: 266 data_time: 0.59s time: 118.81s eta: 10:23:44 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 02:21:35][INFO] [Train] (Epoch 1) Step 280/593 lr: 0.000014 loss: 0.134 loss(reduced): 0.163 grad_norm: 0.12 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 31964.0 tgs: 268 data_time: 0.59s time: 118.98s eta: 10:22:38 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 02:23:34][INFO] [Train] (Epoch 1) Step 281/593 lr: 0.000014 loss: 0.112 loss(reduced): 0.163 grad_norm: 0.08 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 31111.0 tgs: 260 data_time: 0.76s time: 119.28s eta: 10:22:15 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 02:25:33][INFO] [Train] (Epoch 1) Step 282/593 lr: 0.000014 loss: 0.158 loss(reduced): 0.160 grad_norm: 0.11 if_nan_skip: 0 max_memory: 41.0GB 
text_tokens: 31779.0 tgs: 266 data_time: 0.93s time: 119.20s eta: 10:19:50 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 02:27:32][INFO] [Train] (Epoch 1) Step 283/593 lr: 0.000014 loss: 0.150 loss(reduced): 0.161 grad_norm: 0.11 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 32217.0 tgs: 270 data_time: 0.62s time: 119.06s eta: 10:17:07 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 02:29:32][INFO] [Train] (Epoch 1) Step 284/593 lr: 0.000014 loss: 0.187 loss(reduced): 0.159 grad_norm: 0.10 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 31799.0 tgs: 266 data_time: 0.87s time: 119.48s eta: 10:17:18 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 02:31:31][INFO] [Train] (Epoch 1) Step 285/593 lr: 0.000014 loss: 0.214 loss(reduced): 0.165 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 31617.0 tgs: 264 data_time: 0.84s time: 119.42s eta: 10:15:00 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 02:33:31][INFO] [Train] (Epoch 1) Step 286/593 lr: 0.000014 loss: 0.137 loss(reduced): 0.168 grad_norm: 0.13 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 32303.0 tgs: 270 data_time: 0.65s time: 119.47s eta: 10:13:15 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 02:35:30][INFO] [Train] (Epoch 1) Step 287/593 lr: 0.000014 loss: 0.220 loss(reduced): 0.159 grad_norm: 0.09 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 30787.0 tgs: 258 data_time: 0.90s time: 119.32s eta: 10:10:31 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 02:37:29][INFO] [Train] (Epoch 1) Step 288/593 lr: 0.000014 loss: 0.183 loss(reduced): 0.160 grad_norm: 0.10 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 31276.0 tgs: 262 data_time: 0.74s time: 119.08s eta: 10:07:17 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 02:39:29][INFO] [Train] (Epoch 1) Step 289/593 lr: 0.000014 loss: 0.162 loss(reduced): 0.157 grad_norm: 0.13 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 31662.0 tgs: 264 data_time: 0.86s time: 119.71s eta: 10:08:30 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 02:41:29][INFO] [Train] (Epoch 1) Step 290/593 lr: 0.000014 loss: 0.170 loss(reduced): 0.159 grad_norm: 0.09 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 32111.0 tgs: 267 data_time: 0.51s time: 119.83s eta: 10:07:07 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 02:43:30][INFO] [Train] (Epoch 1) Step 291/593 lr: 0.000014 loss: 0.156 loss(reduced): 0.183 grad_norm: 0.09 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 32345.0 tgs: 266 data_time: 0.56s time: 121.55s eta: 10:13:49 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 02:45:29][INFO] [Train] (Epoch 1) Step 292/593 lr: 0.000013 loss: 0.189 loss(reduced): 0.158 grad_norm: 0.10 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 32009.0 tgs: 268 data_time: 0.93s time: 119.26s eta: 10:00:15 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 02:47:28][INFO] [Train] (Epoch 1) Step 293/593 lr: 0.000013 loss: 0.158 loss(reduced): 0.154 grad_norm: 0.08 if_nan_skip: 0 max_memory: 40.9GB text_tokens: 31374.0 tgs: 264 data_time: 0.66s time: 118.81s eta: 9:56:00 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 02:49:28][INFO] [Train] (Epoch 1) Step 294/593 lr: 0.000013 loss: 0.118 loss(reduced): 0.168 grad_norm: 0.09 if_nan_skip: 0 max_memory: 40.9GB text_tokens: 30939.0 tgs: 258 data_time: 0.62s time: 119.55s eta: 9:57:46 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 02:51:28][INFO] [Train] (Epoch 1) Step 295/593 lr: 0.000013 loss: 0.161 loss(reduced): 0.163 grad_norm: 0.10 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 31869.0 tgs: 266 data_time: 0.88s time: 119.70s eta: 9:56:30 +[XTuner][RANK 0][DP 0][SP 0][TP 
0][2025-01-22 02:53:27][INFO] [Train] (Epoch 1) Step 296/593 lr: 0.000013 loss: 0.126 loss(reduced): 0.149 grad_norm: 0.09 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 30848.0 tgs: 258 data_time: 0.88s time: 119.26s eta: 9:52:18 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 02:55:26][INFO] [Train] (Epoch 1) Step 297/593 lr: 0.000013 loss: 0.130 loss(reduced): 0.159 grad_norm: 0.07 if_nan_skip: 0 max_memory: 40.6GB text_tokens: 29950.0 tgs: 250 data_time: 0.87s time: 119.33s eta: 9:50:41 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 02:57:25][INFO] [Train] (Epoch 1) Step 298/593 lr: 0.000013 loss: 0.128 loss(reduced): 0.161 grad_norm: 0.11 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 32202.0 tgs: 270 data_time: 0.71s time: 119.22s eta: 9:48:09 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 02:59:28][INFO] [Train] (Epoch 1) Step 299/593 lr: 0.000013 loss: 0.177 loss(reduced): 0.160 grad_norm: 0.08 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 31350.0 tgs: 260 data_time: 0.67s time: 120.13s eta: 9:50:38 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 03:01:27][INFO] [Train] (Epoch 1) Step 300/593 lr: 0.000013 loss: 0.141 loss(reduced): 0.167 grad_norm: 0.09 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 32595.0 tgs: 273 data_time: 0.58s time: 119.25s eta: 9:44:18 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 03:03:27][INFO] [Train] (Epoch 1) Step 301/593 lr: 0.000013 loss: 0.156 loss(reduced): 0.166 grad_norm: 0.10 if_nan_skip: 0 max_memory: 41.1GB text_tokens: 31881.0 tgs: 266 data_time: 0.84s time: 119.48s eta: 9:43:28 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 03:05:26][INFO] [Train] (Epoch 1) Step 302/593 lr: 0.000013 loss: 0.130 loss(reduced): 0.166 grad_norm: 0.08 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 31319.0 tgs: 262 data_time: 0.79s time: 119.52s eta: 9:41:38 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 03:07:25][INFO] [Train] (Epoch 1) Step 303/593 lr: 0.000013 loss: 0.122 loss(reduced): 0.150 grad_norm: 0.08 if_nan_skip: 0 max_memory: 41.0GB text_tokens: 31581.0 tgs: 264 data_time: 0.73s time: 119.20s eta: 9:38:08 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 03:09:25][INFO] [Train] (Epoch 1) Step 304/593 lr: 0.000013 loss: 0.171 loss(reduced): 0.148 grad_norm: 0.09 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 32229.0 tgs: 269 data_time: 0.69s time: 119.48s eta: 9:37:28 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 03:11:24][INFO] [Train] (Epoch 1) Step 305/593 lr: 0.000013 loss: 0.112 loss(reduced): 0.152 grad_norm: 0.11 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 31411.0 tgs: 263 data_time: 0.58s time: 119.05s eta: 9:33:26 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 03:13:23][INFO] [Train] (Epoch 1) Step 306/593 lr: 0.000013 loss: 0.128 loss(reduced): 0.156 grad_norm: 0.08 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 32222.0 tgs: 270 data_time: 0.57s time: 119.24s eta: 9:32:20 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 03:15:22][INFO] [Train] (Epoch 1) Step 307/593 lr: 0.000013 loss: 0.158 loss(reduced): 0.160 grad_norm: 0.10 if_nan_skip: 0 max_memory: 41.1GB text_tokens: 31166.0 tgs: 261 data_time: 0.80s time: 119.16s eta: 9:30:00 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 03:17:22][INFO] [Train] (Epoch 1) Step 308/593 lr: 0.000013 loss: 0.182 loss(reduced): 0.164 grad_norm: 0.11 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 32416.0 tgs: 271 data_time: 0.56s time: 119.22s eta: 9:28:16 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 03:19:21][INFO] [Train] (Epoch 1) Step 309/593 lr: 0.000013 loss: 0.243 loss(reduced): 0.157 grad_norm: 0.07 
if_nan_skip: 0 max_memory: 41.2GB text_tokens: 31629.0 tgs: 264 data_time: 0.75s time: 119.46s eta: 9:27:24 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 03:21:20][INFO] [Train] (Epoch 1) Step 310/593 lr: 0.000013 loss: 0.166 loss(reduced): 0.155 grad_norm: 0.10 if_nan_skip: 0 max_memory: 41.2GB text_tokens: 31889.0 tgs: 268 data_time: 0.80s time: 118.98s eta: 9:23:11 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 03:23:19][INFO] [Train] (Epoch 1) Step 311/593 lr: 0.000013 loss: 0.128 loss(reduced): 0.160 grad_norm: 0.09 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 31990.0 tgs: 268 data_time: 0.60s time: 119.03s eta: 9:21:26 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 03:25:19][INFO] [Train] (Epoch 1) Step 312/593 lr: 0.000013 loss: 0.177 loss(reduced): 0.162 grad_norm: 0.08 if_nan_skip: 0 max_memory: 41.2GB text_tokens: 27782.0 tgs: 232 data_time: 0.63s time: 119.53s eta: 9:21:48 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 03:27:18][INFO] [Train] (Epoch 1) Step 313/593 lr: 0.000013 loss: 0.118 loss(reduced): 0.179 grad_norm: 0.09 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 32021.0 tgs: 267 data_time: 0.65s time: 119.59s eta: 9:20:04 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 03:29:17][INFO] [Train] (Epoch 1) Step 314/593 lr: 0.000013 loss: 0.157 loss(reduced): 0.157 grad_norm: 0.08 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 32488.0 tgs: 272 data_time: 0.92s time: 119.12s eta: 9:15:54 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 03:31:16][INFO] [Train] (Epoch 1) Step 315/593 lr: 0.000013 loss: 0.120 loss(reduced): 0.149 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 32369.0 tgs: 271 data_time: 0.64s time: 119.07s eta: 9:13:40 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 03:33:15][INFO] [Train] (Epoch 1) Step 316/593 lr: 0.000013 loss: 0.157 loss(reduced): 0.159 grad_norm: 0.08 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 31977.0 tgs: 268 data_time: 0.90s time: 118.90s eta: 9:10:52 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 03:35:15][INFO] [Train] (Epoch 1) Step 317/593 lr: 0.000013 loss: 0.137 loss(reduced): 0.153 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 32476.0 tgs: 272 data_time: 0.76s time: 119.32s eta: 9:10:51 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 03:37:14][INFO] [Train] (Epoch 1) Step 318/593 lr: 0.000012 loss: 0.167 loss(reduced): 0.162 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 31164.0 tgs: 261 data_time: 1.07s time: 119.09s eta: 9:07:49 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 03:39:13][INFO] [Train] (Epoch 1) Step 319/593 lr: 0.000012 loss: 0.137 loss(reduced): 0.152 grad_norm: 0.08 if_nan_skip: 0 max_memory: 41.2GB text_tokens: 30880.0 tgs: 259 data_time: 0.56s time: 118.85s eta: 9:04:44 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 03:41:12][INFO] [Train] (Epoch 1) Step 320/593 lr: 0.000012 loss: 0.139 loss(reduced): 0.174 grad_norm: 0.08 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 31459.0 tgs: 262 data_time: 0.69s time: 119.67s eta: 9:06:29 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 03:43:12][INFO] [Train] (Epoch 1) Step 321/593 lr: 0.000012 loss: 0.175 loss(reduced): 0.152 grad_norm: 0.10 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 31285.0 tgs: 261 data_time: 0.56s time: 119.43s eta: 9:03:24 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 03:45:11][INFO] [Train] (Epoch 1) Step 322/593 lr: 0.000012 loss: 0.198 loss(reduced): 0.156 grad_norm: 0.10 if_nan_skip: 0 max_memory: 41.2GB text_tokens: 31374.0 tgs: 262 data_time: 0.97s time: 119.44s eta: 9:01:26 +[XTuner][RANK 
0][DP 0][SP 0][TP 0][2025-01-22 03:47:10][INFO] [Train] (Epoch 1) Step 323/593 lr: 0.000012 loss: 0.275 loss(reduced): 0.157 grad_norm: 0.08 if_nan_skip: 0 max_memory: 41.0GB text_tokens: 30986.0 tgs: 260 data_time: 0.94s time: 118.73s eta: 8:56:16 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 03:49:09][INFO] [Train] (Epoch 1) Step 324/593 lr: 0.000012 loss: 0.170 loss(reduced): 0.154 grad_norm: 0.08 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 32084.0 tgs: 269 data_time: 0.71s time: 118.90s eta: 8:55:02 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 03:51:08][INFO] [Train] (Epoch 1) Step 325/593 lr: 0.000012 loss: 0.148 loss(reduced): 0.159 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.2GB text_tokens: 31531.0 tgs: 264 data_time: 0.81s time: 119.30s eta: 8:54:51 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 03:53:07][INFO] [Train] (Epoch 1) Step 326/593 lr: 0.000012 loss: 0.118 loss(reduced): 0.159 grad_norm: 0.08 if_nan_skip: 0 max_memory: 41.2GB text_tokens: 31753.0 tgs: 266 data_time: 0.69s time: 119.15s eta: 8:52:12 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 03:55:07][INFO] [Train] (Epoch 1) Step 327/593 lr: 0.000012 loss: 0.140 loss(reduced): 0.157 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 31676.0 tgs: 265 data_time: 0.65s time: 119.27s eta: 8:50:44 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 03:57:07][INFO] [Train] (Epoch 1) Step 328/593 lr: 0.000012 loss: 0.106 loss(reduced): 0.164 grad_norm: 0.08 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 31715.0 tgs: 263 data_time: 0.64s time: 120.47s eta: 8:54:04 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 03:59:06][INFO] [Train] (Epoch 1) Step 329/593 lr: 0.000012 loss: 0.165 loss(reduced): 0.155 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 31985.0 tgs: 268 data_time: 0.61s time: 119.23s eta: 8:46:37 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 04:01:05][INFO] [Train] (Epoch 1) Step 330/593 lr: 0.000012 loss: 0.145 loss(reduced): 0.160 grad_norm: 0.08 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 32250.0 tgs: 270 data_time: 0.67s time: 119.10s eta: 8:44:01 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 04:03:06][INFO] [Train] (Epoch 1) Step 331/593 lr: 0.000012 loss: 0.178 loss(reduced): 0.160 grad_norm: 0.08 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 31521.0 tgs: 260 data_time: 0.69s time: 120.80s eta: 8:49:29 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 04:05:06][INFO] [Train] (Epoch 1) Step 332/593 lr: 0.000012 loss: 0.107 loss(reduced): 0.155 grad_norm: 0.08 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 31377.0 tgs: 261 data_time: 1.00s time: 120.20s eta: 8:44:51 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 04:07:05][INFO] [Train] (Epoch 1) Step 333/593 lr: 0.000012 loss: 0.136 loss(reduced): 0.148 grad_norm: 0.09 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 32176.0 tgs: 270 data_time: 0.58s time: 118.83s eta: 8:36:54 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 04:09:05][INFO] [Train] (Epoch 1) Step 334/593 lr: 0.000012 loss: 0.132 loss(reduced): 0.159 grad_norm: 0.08 if_nan_skip: 0 max_memory: 41.2GB text_tokens: 30387.0 tgs: 254 data_time: 0.63s time: 119.38s eta: 8:37:19 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 04:11:04][INFO] [Train] (Epoch 1) Step 335/593 lr: 0.000012 loss: 0.139 loss(reduced): 0.151 grad_norm: 0.08 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 31766.0 tgs: 266 data_time: 0.67s time: 119.29s eta: 8:34:56 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 04:13:03][INFO] [Train] (Epoch 1) Step 336/593 lr: 0.000012 loss: 0.174 loss(reduced): 
0.164 grad_norm: 0.09 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 31829.0 tgs: 267 data_time: 0.85s time: 119.06s eta: 8:31:56 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 04:15:02][INFO] [Train] (Epoch 1) Step 337/593 lr: 0.000012 loss: 0.159 loss(reduced): 0.161 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 32164.0 tgs: 269 data_time: 0.90s time: 119.17s eta: 8:30:25 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 04:17:01][INFO] [Train] (Epoch 1) Step 338/593 lr: 0.000012 loss: 0.189 loss(reduced): 0.159 grad_norm: 0.08 if_nan_skip: 0 max_memory: 40.8GB text_tokens: 29897.0 tgs: 251 data_time: 0.68s time: 119.05s eta: 8:27:56 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 04:19:00][INFO] [Train] (Epoch 1) Step 339/593 lr: 0.000012 loss: 0.128 loss(reduced): 0.154 grad_norm: 0.08 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 32036.0 tgs: 268 data_time: 0.86s time: 119.12s eta: 8:26:15 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 04:21:00][INFO] [Train] (Epoch 1) Step 340/593 lr: 0.000012 loss: 0.159 loss(reduced): 0.156 grad_norm: 0.09 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 32269.0 tgs: 270 data_time: 0.91s time: 119.24s eta: 8:24:46 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 04:22:59][INFO] [Train] (Epoch 1) Step 341/593 lr: 0.000012 loss: 0.187 loss(reduced): 0.162 grad_norm: 0.08 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 31112.0 tgs: 260 data_time: 0.77s time: 119.22s eta: 8:22:41 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 04:24:58][INFO] [Train] (Epoch 1) Step 342/593 lr: 0.000012 loss: 0.211 loss(reduced): 0.158 grad_norm: 0.08 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 31075.0 tgs: 260 data_time: 0.72s time: 119.38s eta: 8:21:23 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 04:26:58][INFO] [Train] (Epoch 1) Step 343/593 lr: 0.000012 loss: 0.154 loss(reduced): 0.157 grad_norm: 0.10 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 31844.0 tgs: 265 data_time: 0.72s time: 119.77s eta: 8:21:03 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 04:28:57][INFO] [Train] (Epoch 1) Step 344/593 lr: 0.000012 loss: 0.165 loss(reduced): 0.158 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 31580.0 tgs: 264 data_time: 0.77s time: 119.20s eta: 8:16:40 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 04:30:56][INFO] [Train] (Epoch 1) Step 345/593 lr: 0.000011 loss: 0.149 loss(reduced): 0.154 grad_norm: 0.11 if_nan_skip: 0 max_memory: 41.1GB text_tokens: 30329.0 tgs: 254 data_time: 0.67s time: 119.02s eta: 8:13:55 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 04:32:56][INFO] [Train] (Epoch 1) Step 346/593 lr: 0.000011 loss: 0.131 loss(reduced): 0.163 grad_norm: 0.10 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 32217.0 tgs: 268 data_time: 0.70s time: 119.91s eta: 8:15:36 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 04:34:56][INFO] [Train] (Epoch 1) Step 347/593 lr: 0.000011 loss: 0.146 loss(reduced): 0.159 grad_norm: 0.10 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 31680.0 tgs: 264 data_time: 0.62s time: 119.55s eta: 8:12:08 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 04:36:55][INFO] [Train] (Epoch 1) Step 348/593 lr: 0.000011 loss: 0.182 loss(reduced): 0.155 grad_norm: 0.08 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 32202.0 tgs: 269 data_time: 0.66s time: 119.59s eta: 8:10:19 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 04:38:54][INFO] [Train] (Epoch 1) Step 349/593 lr: 0.000011 loss: 0.198 loss(reduced): 0.154 grad_norm: 0.09 if_nan_skip: 0 max_memory: 41.2GB text_tokens: 31300.0 tgs: 263 data_time: 0.56s time: 119.00s eta: 
8:05:55 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 04:40:54][INFO] [Train] (Epoch 1) Step 350/593 lr: 0.000011 loss: 0.101 loss(reduced): 0.161 grad_norm: 0.08 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 28806.0 tgs: 240 data_time: 0.62s time: 119.83s eta: 8:07:19 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 04:42:54][INFO] [Train] (Epoch 1) Step 351/593 lr: 0.000011 loss: 0.121 loss(reduced): 0.155 grad_norm: 0.08 if_nan_skip: 0 max_memory: 41.2GB text_tokens: 32048.0 tgs: 267 data_time: 0.74s time: 119.70s eta: 8:04:48 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 04:44:53][INFO] [Train] (Epoch 1) Step 352/593 lr: 0.000011 loss: 0.179 loss(reduced): 0.152 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 32171.0 tgs: 269 data_time: 0.61s time: 119.40s eta: 8:01:35 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 04:46:53][INFO] [Train] (Epoch 1) Step 353/593 lr: 0.000011 loss: 0.189 loss(reduced): 0.146 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 31906.0 tgs: 267 data_time: 1.05s time: 119.29s eta: 7:59:08 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 04:48:52][INFO] [Train] (Epoch 1) Step 354/593 lr: 0.000011 loss: 0.136 loss(reduced): 0.156 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 31428.0 tgs: 263 data_time: 0.54s time: 119.19s eta: 7:56:44 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 04:50:51][INFO] [Train] (Epoch 1) Step 355/593 lr: 0.000011 loss: 0.194 loss(reduced): 0.162 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 31100.0 tgs: 260 data_time: 0.74s time: 119.19s eta: 7:54:47 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 04:52:50][INFO] [Train] (Epoch 1) Step 356/593 lr: 0.000011 loss: 0.170 loss(reduced): 0.154 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 32186.0 tgs: 270 data_time: 0.68s time: 118.94s eta: 7:51:47 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 04:54:49][INFO] [Train] (Epoch 1) Step 357/593 lr: 0.000011 loss: 0.126 loss(reduced): 0.150 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.2GB text_tokens: 31783.0 tgs: 267 data_time: 0.54s time: 118.97s eta: 7:49:56 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 04:56:48][INFO] [Train] (Epoch 1) Step 358/593 lr: 0.000011 loss: 0.164 loss(reduced): 0.153 grad_norm: 0.08 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 32208.0 tgs: 269 data_time: 0.94s time: 119.41s eta: 7:49:41 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 04:58:47][INFO] [Train] (Epoch 1) Step 359/593 lr: 0.000011 loss: 0.146 loss(reduced): 0.159 grad_norm: 0.08 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 32017.0 tgs: 268 data_time: 0.83s time: 119.03s eta: 7:46:13 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 05:00:47][INFO] [Train] (Epoch 1) Step 360/593 lr: 0.000011 loss: 0.160 loss(reduced): 0.158 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 32215.0 tgs: 270 data_time: 0.80s time: 119.10s eta: 7:44:28 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 05:02:47][INFO] [Train] (Epoch 1) Step 361/593 lr: 0.000011 loss: 0.137 loss(reduced): 0.164 grad_norm: 0.08 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 31770.0 tgs: 264 data_time: 0.73s time: 120.02s eta: 7:46:04 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 05:04:46][INFO] [Train] (Epoch 1) Step 362/593 lr: 0.000011 loss: 0.134 loss(reduced): 0.151 grad_norm: 0.09 if_nan_skip: 0 max_memory: 41.2GB text_tokens: 31079.0 tgs: 259 data_time: 0.54s time: 119.83s eta: 7:43:19 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 05:06:46][INFO] [Train] (Epoch 1) Step 363/593 lr: 0.000011 loss: 
0.114 loss(reduced): 0.157 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 31313.0 tgs: 262 data_time: 0.66s time: 119.17s eta: 7:38:48 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 05:08:45][INFO] [Train] (Epoch 1) Step 364/593 lr: 0.000011 loss: 0.150 loss(reduced): 0.152 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 32292.0 tgs: 271 data_time: 0.68s time: 118.96s eta: 7:36:01 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 05:10:44][INFO] [Train] (Epoch 1) Step 365/593 lr: 0.000011 loss: 0.170 loss(reduced): 0.146 grad_norm: 0.08 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 32111.0 tgs: 269 data_time: 0.72s time: 119.32s eta: 7:35:25 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 05:12:43][INFO] [Train] (Epoch 1) Step 366/593 lr: 0.000011 loss: 0.154 loss(reduced): 0.157 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 31682.0 tgs: 265 data_time: 0.78s time: 119.39s eta: 7:33:41 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 05:14:42][INFO] [Train] (Epoch 1) Step 367/593 lr: 0.000011 loss: 0.161 loss(reduced): 0.165 grad_norm: 0.09 if_nan_skip: 0 max_memory: 41.1GB text_tokens: 31435.0 tgs: 263 data_time: 0.74s time: 119.07s eta: 7:30:29 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 05:16:41][INFO] [Train] (Epoch 1) Step 368/593 lr: 0.000011 loss: 0.138 loss(reduced): 0.158 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 31965.0 tgs: 268 data_time: 0.79s time: 118.98s eta: 7:28:08 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 05:18:41][INFO] [Train] (Epoch 1) Step 369/593 lr: 0.000011 loss: 0.137 loss(reduced): 0.149 grad_norm: 0.09 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 32421.0 tgs: 271 data_time: 0.80s time: 119.23s eta: 7:27:07 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 05:20:40][INFO] [Train] (Epoch 1) Step 370/593 lr: 0.000011 loss: 0.147 loss(reduced): 0.154 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.1GB text_tokens: 30436.0 tgs: 255 data_time: 0.66s time: 119.01s eta: 7:24:18 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 05:22:39][INFO] [Train] (Epoch 1) Step 371/593 lr: 0.000011 loss: 0.133 loss(reduced): 0.154 grad_norm: 0.08 if_nan_skip: 0 max_memory: 41.2GB text_tokens: 31425.0 tgs: 264 data_time: 0.63s time: 118.91s eta: 7:21:57 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 05:24:38][INFO] [Train] (Epoch 1) Step 372/593 lr: 0.000010 loss: 0.192 loss(reduced): 0.154 grad_norm: 0.08 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 32364.0 tgs: 271 data_time: 0.77s time: 119.08s eta: 7:20:34 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 05:26:37][INFO] [Train] (Epoch 1) Step 373/593 lr: 0.000010 loss: 0.125 loss(reduced): 0.161 grad_norm: 0.08 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 30653.0 tgs: 257 data_time: 0.86s time: 118.99s eta: 7:18:16 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 05:28:36][INFO] [Train] (Epoch 1) Step 374/593 lr: 0.000010 loss: 0.187 loss(reduced): 0.155 grad_norm: 0.08 if_nan_skip: 0 max_memory: 41.2GB text_tokens: 31585.0 tgs: 265 data_time: 0.72s time: 119.02s eta: 7:16:24 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 05:30:35][INFO] [Train] (Epoch 1) Step 375/593 lr: 0.000010 loss: 0.143 loss(reduced): 0.157 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 32409.0 tgs: 271 data_time: 0.46s time: 119.27s eta: 7:15:19 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 05:32:34][INFO] [Train] (Epoch 1) Step 376/593 lr: 0.000010 loss: 0.119 loss(reduced): 0.148 grad_norm: 0.08 if_nan_skip: 0 max_memory: 41.1GB text_tokens: 31615.0 tgs: 264 data_time: 
0.53s time: 119.36s eta: 7:13:39 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 05:34:33][INFO] [Train] (Epoch 1) Step 377/593 lr: 0.000010 loss: 0.193 loss(reduced): 0.160 grad_norm: 0.08 if_nan_skip: 0 max_memory: 41.1GB text_tokens: 31529.0 tgs: 265 data_time: 0.72s time: 118.94s eta: 7:10:09 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 05:36:32][INFO] [Train] (Epoch 1) Step 378/593 lr: 0.000010 loss: 0.236 loss(reduced): 0.161 grad_norm: 0.08 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 31443.0 tgs: 263 data_time: 0.77s time: 119.22s eta: 7:09:12 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 05:38:32][INFO] [Train] (Epoch 1) Step 379/593 lr: 0.000010 loss: 0.180 loss(reduced): 0.148 grad_norm: 0.08 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 31988.0 tgs: 267 data_time: 0.58s time: 119.56s eta: 7:08:25 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 05:40:32][INFO] [Train] (Epoch 1) Step 380/593 lr: 0.000010 loss: 0.144 loss(reduced): 0.155 grad_norm: 0.09 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 31726.0 tgs: 265 data_time: 0.60s time: 119.67s eta: 7:06:50 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 05:42:31][INFO] [Train] (Epoch 1) Step 381/593 lr: 0.000010 loss: 0.133 loss(reduced): 0.155 grad_norm: 0.08 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 31531.0 tgs: 265 data_time: 0.76s time: 118.96s eta: 7:02:17 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 05:44:30][INFO] [Train] (Epoch 1) Step 382/593 lr: 0.000010 loss: 0.225 loss(reduced): 0.153 grad_norm: 0.08 if_nan_skip: 0 max_memory: 41.2GB text_tokens: 31871.0 tgs: 267 data_time: 0.66s time: 119.29s eta: 7:01:28 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 05:46:30][INFO] [Train] (Epoch 1) Step 383/593 lr: 0.000010 loss: 0.132 loss(reduced): 0.161 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 32550.0 tgs: 272 data_time: 0.84s time: 119.55s eta: 7:00:24 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 05:48:29][INFO] [Train] (Epoch 1) Step 384/593 lr: 0.000010 loss: 0.133 loss(reduced): 0.152 grad_norm: 0.08 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 31835.0 tgs: 266 data_time: 0.88s time: 119.41s eta: 6:57:56 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 05:50:28][INFO] [Train] (Epoch 1) Step 385/593 lr: 0.000010 loss: 0.131 loss(reduced): 0.152 grad_norm: 0.08 if_nan_skip: 0 max_memory: 41.1GB text_tokens: 31487.0 tgs: 264 data_time: 0.77s time: 118.91s eta: 6:54:11 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 05:52:27][INFO] [Train] (Epoch 1) Step 386/593 lr: 0.000010 loss: 0.190 loss(reduced): 0.160 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 31543.0 tgs: 264 data_time: 0.88s time: 119.32s eta: 6:53:38 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 05:54:26][INFO] [Train] (Epoch 1) Step 387/593 lr: 0.000010 loss: 0.163 loss(reduced): 0.150 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 31299.0 tgs: 263 data_time: 0.93s time: 118.93s eta: 6:50:18 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 05:56:25][INFO] [Train] (Epoch 1) Step 388/593 lr: 0.000010 loss: 0.146 loss(reduced): 0.155 grad_norm: 0.08 if_nan_skip: 0 max_memory: 41.2GB text_tokens: 31011.0 tgs: 260 data_time: 0.74s time: 119.26s eta: 6:49:28 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 05:58:24][INFO] [Train] (Epoch 1) Step 389/593 lr: 0.000010 loss: 0.131 loss(reduced): 0.152 grad_norm: 0.08 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 31985.0 tgs: 268 data_time: 0.43s time: 119.02s eta: 6:46:39 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 06:00:24][INFO] [Train] (Epoch 1) Step 
390/593 lr: 0.000010 loss: 0.122 loss(reduced): 0.150 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 32473.0 tgs: 272 data_time: 0.94s time: 119.23s eta: 6:45:22 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 06:02:23][INFO] [Train] (Epoch 1) Step 391/593 lr: 0.000010 loss: 0.215 loss(reduced): 0.159 grad_norm: 0.08 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 32216.0 tgs: 269 data_time: 0.80s time: 119.41s eta: 6:44:01 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 06:04:22][INFO] [Train] (Epoch 1) Step 392/593 lr: 0.000010 loss: 0.119 loss(reduced): 0.149 grad_norm: 0.08 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 31796.0 tgs: 267 data_time: 0.84s time: 119.02s eta: 6:40:41 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 06:06:21][INFO] [Train] (Epoch 1) Step 393/593 lr: 0.000010 loss: 0.156 loss(reduced): 0.156 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 32225.0 tgs: 270 data_time: 0.90s time: 119.03s eta: 6:38:45 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 06:08:21][INFO] [Train] (Epoch 1) Step 394/593 lr: 0.000010 loss: 0.160 loss(reduced): 0.159 grad_norm: 0.09 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 31563.0 tgs: 264 data_time: 0.54s time: 119.45s eta: 6:38:10 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 06:10:19][INFO] [Train] (Epoch 1) Step 395/593 lr: 0.000010 loss: 0.118 loss(reduced): 0.152 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 31765.0 tgs: 267 data_time: 0.80s time: 118.78s eta: 6:33:56 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 06:12:19][INFO] [Train] (Epoch 1) Step 396/593 lr: 0.000010 loss: 0.189 loss(reduced): 0.162 grad_norm: 0.09 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 30831.0 tgs: 258 data_time: 0.90s time: 119.45s eta: 6:34:11 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 06:14:18][INFO] [Train] (Epoch 1) Step 397/593 lr: 0.000010 loss: 0.250 loss(reduced): 0.166 grad_norm: 0.12 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 31760.0 tgs: 265 data_time: 0.74s time: 119.57s eta: 6:32:34 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 06:16:18][INFO] [Train] (Epoch 1) Step 398/593 lr: 0.000010 loss: 0.137 loss(reduced): 0.157 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 31566.0 tgs: 264 data_time: 0.70s time: 119.37s eta: 6:29:56 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 06:18:19][INFO] [Train] (Epoch 1) Step 399/593 lr: 0.000010 loss: 0.148 loss(reduced): 0.148 grad_norm: 0.09 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 31344.0 tgs: 262 data_time: 0.67s time: 119.34s eta: 6:27:51 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 06:20:19][INFO] [Train] (Epoch 1) Step 400/593 lr: 0.000010 loss: 0.133 loss(reduced): 0.151 grad_norm: 0.09 if_nan_skip: 0 max_memory: 41.0GB text_tokens: 31620.0 tgs: 264 data_time: 0.66s time: 119.36s eta: 6:25:55 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 06:22:17][INFO] [Train] (Epoch 1) Step 401/593 lr: 0.000009 loss: 0.212 loss(reduced): 0.152 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.2GB text_tokens: 30792.0 tgs: 259 data_time: 0.82s time: 118.82s eta: 6:22:12 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 06:24:16][INFO] [Train] (Epoch 1) Step 402/593 lr: 0.000009 loss: 0.164 loss(reduced): 0.160 grad_norm: 0.09 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 31451.0 tgs: 264 data_time: 0.65s time: 119.02s eta: 6:20:51 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 06:26:16][INFO] [Train] (Epoch 1) Step 403/593 lr: 0.000009 loss: 0.175 loss(reduced): 0.153 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 
32178.0 tgs: 269 data_time: 0.64s time: 119.40s eta: 6:20:04 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 06:28:15][INFO] [Train] (Epoch 1) Step 404/593 lr: 0.000009 loss: 0.143 loss(reduced): 0.156 grad_norm: 0.08 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 32053.0 tgs: 268 data_time: 0.93s time: 119.34s eta: 6:17:54 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 06:30:14][INFO] [Train] (Epoch 1) Step 405/593 lr: 0.000009 loss: 0.116 loss(reduced): 0.156 grad_norm: 0.08 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 30904.0 tgs: 259 data_time: 0.55s time: 119.06s eta: 6:15:01 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 06:32:14][INFO] [Train] (Epoch 1) Step 406/593 lr: 0.000009 loss: 0.119 loss(reduced): 0.154 grad_norm: 0.08 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 31883.0 tgs: 267 data_time: 0.69s time: 119.40s eta: 6:14:07 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 06:34:13][INFO] [Train] (Epoch 1) Step 407/593 lr: 0.000009 loss: 0.139 loss(reduced): 0.149 grad_norm: 0.10 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 31180.0 tgs: 261 data_time: 0.76s time: 119.38s eta: 6:12:04 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 06:36:12][INFO] [Train] (Epoch 1) Step 408/593 lr: 0.000009 loss: 0.140 loss(reduced): 0.158 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 32184.0 tgs: 269 data_time: 0.77s time: 119.30s eta: 6:09:49 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 06:38:11][INFO] [Train] (Epoch 1) Step 409/593 lr: 0.000009 loss: 0.141 loss(reduced): 0.150 grad_norm: 0.11 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 32318.0 tgs: 271 data_time: 0.52s time: 119.10s eta: 6:07:13 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 06:40:13][INFO] [Train] (Epoch 1) Step 410/593 lr: 0.000009 loss: 0.146 loss(reduced): 0.165 grad_norm: 0.06 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 31950.0 tgs: 262 data_time: 0.82s time: 121.94s eta: 6:13:57 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 06:42:13][INFO] [Train] (Epoch 1) Step 411/593 lr: 0.000009 loss: 0.118 loss(reduced): 0.148 grad_norm: 0.10 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 31717.0 tgs: 265 data_time: 0.88s time: 119.38s eta: 6:04:07 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 06:44:12][INFO] [Train] (Epoch 1) Step 412/593 lr: 0.000009 loss: 0.243 loss(reduced): 0.162 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 31253.0 tgs: 261 data_time: 0.97s time: 119.58s eta: 6:02:43 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 06:46:12][INFO] [Train] (Epoch 1) Step 413/593 lr: 0.000009 loss: 0.129 loss(reduced): 0.153 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 32230.0 tgs: 270 data_time: 0.73s time: 119.25s eta: 5:59:43 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 06:48:11][INFO] [Train] (Epoch 1) Step 414/593 lr: 0.000009 loss: 0.264 loss(reduced): 0.154 grad_norm: 0.06 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 32087.0 tgs: 269 data_time: 0.99s time: 119.26s eta: 5:57:46 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 06:50:10][INFO] [Train] (Epoch 1) Step 415/593 lr: 0.000009 loss: 0.276 loss(reduced): 0.157 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 32065.0 tgs: 269 data_time: 0.97s time: 118.93s eta: 5:54:47 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 06:52:09][INFO] [Train] (Epoch 1) Step 416/593 lr: 0.000009 loss: 0.113 loss(reduced): 0.158 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 31503.0 tgs: 264 data_time: 0.69s time: 119.02s eta: 5:53:05 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 
06:54:08][INFO] [Train] (Epoch 1) Step 417/593 lr: 0.000009 loss: 0.144 loss(reduced): 0.158 grad_norm: 0.06 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 30980.0 tgs: 260 data_time: 0.74s time: 119.05s eta: 5:51:12 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 06:56:07][INFO] [Train] (Epoch 1) Step 418/593 lr: 0.000009 loss: 0.127 loss(reduced): 0.152 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.1GB text_tokens: 30942.0 tgs: 259 data_time: 0.64s time: 119.30s eta: 5:49:56 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 06:58:06][INFO] [Train] (Epoch 1) Step 419/593 lr: 0.000009 loss: 0.126 loss(reduced): 0.149 grad_norm: 0.07 if_nan_skip: 0 max_memory: 40.9GB text_tokens: 30775.0 tgs: 258 data_time: 0.79s time: 118.94s eta: 5:46:54 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 07:00:05][INFO] [Train] (Epoch 1) Step 420/593 lr: 0.000009 loss: 0.142 loss(reduced): 0.163 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 31797.0 tgs: 266 data_time: 0.94s time: 119.29s eta: 5:45:56 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 07:02:05][INFO] [Train] (Epoch 1) Step 421/593 lr: 0.000009 loss: 0.124 loss(reduced): 0.159 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 31761.0 tgs: 266 data_time: 0.67s time: 119.37s eta: 5:44:11 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 07:04:04][INFO] [Train] (Epoch 1) Step 422/593 lr: 0.000009 loss: 0.151 loss(reduced): 0.152 grad_norm: 0.08 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 32075.0 tgs: 269 data_time: 0.67s time: 119.06s eta: 5:41:17 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 07:06:03][INFO] [Train] (Epoch 1) Step 423/593 lr: 0.000009 loss: 0.145 loss(reduced): 0.146 grad_norm: 0.08 if_nan_skip: 0 max_memory: 40.9GB text_tokens: 30389.0 tgs: 254 data_time: 0.50s time: 119.50s eta: 5:40:34 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 07:08:03][INFO] [Train] (Epoch 1) Step 424/593 lr: 0.000009 loss: 0.125 loss(reduced): 0.146 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 31877.0 tgs: 267 data_time: 0.70s time: 119.38s eta: 5:38:14 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 07:10:02][INFO] [Train] (Epoch 1) Step 425/593 lr: 0.000009 loss: 0.116 loss(reduced): 0.152 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 31631.0 tgs: 264 data_time: 0.63s time: 119.56s eta: 5:36:45 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 07:12:01][INFO] [Train] (Epoch 1) Step 426/593 lr: 0.000009 loss: 0.194 loss(reduced): 0.155 grad_norm: 0.06 if_nan_skip: 0 max_memory: 41.2GB text_tokens: 31954.0 tgs: 268 data_time: 0.65s time: 118.99s eta: 5:33:09 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 07:14:00][INFO] [Train] (Epoch 1) Step 427/593 lr: 0.000009 loss: 0.147 loss(reduced): 0.146 grad_norm: 0.06 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 31196.0 tgs: 262 data_time: 0.68s time: 118.94s eta: 5:31:02 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 07:16:00][INFO] [Train] (Epoch 1) Step 428/593 lr: 0.000009 loss: 0.161 loss(reduced): 0.147 grad_norm: 0.06 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 30845.0 tgs: 258 data_time: 0.74s time: 119.27s eta: 5:29:59 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 07:17:59][INFO] [Train] (Epoch 1) Step 429/593 lr: 0.000009 loss: 0.158 loss(reduced): 0.161 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 32436.0 tgs: 271 data_time: 1.02s time: 119.37s eta: 5:28:15 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 07:19:58][INFO] [Train] (Epoch 1) Step 430/593 lr: 0.000009 loss: 0.184 loss(reduced): 0.158 grad_norm: 0.06 if_nan_skip: 
0 max_memory: 40.7GB text_tokens: 31170.0 tgs: 260 data_time: 0.94s time: 119.46s eta: 5:26:32 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 07:21:57][INFO] [Train] (Epoch 1) Step 431/593 lr: 0.000009 loss: 0.137 loss(reduced): 0.154 grad_norm: 0.08 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 31893.0 tgs: 268 data_time: 0.62s time: 118.94s eta: 5:23:07 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 07:23:57][INFO] [Train] (Epoch 1) Step 432/593 lr: 0.000009 loss: 0.181 loss(reduced): 0.168 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 32289.0 tgs: 269 data_time: 0.74s time: 119.63s eta: 5:23:00 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 07:25:56][INFO] [Train] (Epoch 1) Step 433/593 lr: 0.000009 loss: 0.162 loss(reduced): 0.156 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.0GB text_tokens: 31382.0 tgs: 262 data_time: 0.65s time: 119.34s eta: 5:20:14 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 07:27:56][INFO] [Train] (Epoch 1) Step 434/593 lr: 0.000008 loss: 0.138 loss(reduced): 0.164 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 32139.0 tgs: 269 data_time: 0.81s time: 119.13s eta: 5:17:41 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 07:29:55][INFO] [Train] (Epoch 1) Step 435/593 lr: 0.000008 loss: 0.117 loss(reduced): 0.144 grad_norm: 0.08 if_nan_skip: 0 max_memory: 41.0GB text_tokens: 30652.0 tgs: 256 data_time: 0.68s time: 119.41s eta: 5:16:26 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 07:31:55][INFO] [Train] (Epoch 1) Step 436/593 lr: 0.000008 loss: 0.139 loss(reduced): 0.158 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 32479.0 tgs: 271 data_time: 0.53s time: 119.58s eta: 5:14:53 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 07:33:54][INFO] [Train] (Epoch 1) Step 437/593 lr: 0.000008 loss: 0.177 loss(reduced): 0.149 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 32167.0 tgs: 270 data_time: 0.66s time: 119.00s eta: 5:11:22 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 07:35:53][INFO] [Train] (Epoch 1) Step 438/593 lr: 0.000008 loss: 0.160 loss(reduced): 0.153 grad_norm: 0.08 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 31794.0 tgs: 267 data_time: 0.81s time: 119.00s eta: 5:09:23 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 07:37:52][INFO] [Train] (Epoch 1) Step 439/593 lr: 0.000008 loss: 0.157 loss(reduced): 0.157 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 32349.0 tgs: 271 data_time: 0.72s time: 119.22s eta: 5:07:59 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 07:39:52][INFO] [Train] (Epoch 1) Step 440/593 lr: 0.000008 loss: 0.227 loss(reduced): 0.161 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.1GB text_tokens: 30567.0 tgs: 255 data_time: 0.75s time: 119.69s eta: 5:07:11 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 07:41:51][INFO] [Train] (Epoch 1) Step 441/593 lr: 0.000008 loss: 0.118 loss(reduced): 0.153 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 32227.0 tgs: 270 data_time: 0.84s time: 119.11s eta: 5:03:44 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 07:43:50][INFO] [Train] (Epoch 1) Step 442/593 lr: 0.000008 loss: 0.159 loss(reduced): 0.164 grad_norm: 0.08 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 32263.0 tgs: 270 data_time: 0.95s time: 119.07s eta: 5:01:38 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 07:45:51][INFO] [Train] (Epoch 1) Step 443/593 lr: 0.000008 loss: 0.139 loss(reduced): 0.163 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 32574.0 tgs: 268 data_time: 0.67s time: 121.51s eta: 5:05:48 +[XTuner][RANK 0][DP 0][SP 
0][TP 0][2025-01-22 07:47:51][INFO] [Train] (Epoch 1) Step 444/593 lr: 0.000008 loss: 0.149 loss(reduced): 0.154 grad_norm: 0.08 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 31699.0 tgs: 265 data_time: 0.72s time: 119.58s eta: 4:58:56 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 07:49:50][INFO] [Train] (Epoch 1) Step 445/593 lr: 0.000008 loss: 0.127 loss(reduced): 0.150 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 32147.0 tgs: 269 data_time: 0.63s time: 119.50s eta: 4:56:44 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 07:51:50][INFO] [Train] (Epoch 1) Step 446/593 lr: 0.000008 loss: 0.149 loss(reduced): 0.156 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 32298.0 tgs: 269 data_time: 0.56s time: 119.73s eta: 4:55:19 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 07:53:50][INFO] [Train] (Epoch 1) Step 447/593 lr: 0.000008 loss: 0.154 loss(reduced): 0.152 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 32168.0 tgs: 269 data_time: 0.91s time: 119.52s eta: 4:52:49 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 07:55:49][INFO] [Train] (Epoch 1) Step 448/593 lr: 0.000008 loss: 0.175 loss(reduced): 0.153 grad_norm: 0.06 if_nan_skip: 0 max_memory: 40.9GB text_tokens: 30415.0 tgs: 254 data_time: 0.86s time: 119.42s eta: 4:50:35 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 07:57:48][INFO] [Train] (Epoch 1) Step 449/593 lr: 0.000008 loss: 0.126 loss(reduced): 0.141 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 31221.0 tgs: 261 data_time: 0.87s time: 119.23s eta: 4:48:08 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 07:59:47][INFO] [Train] (Epoch 1) Step 450/593 lr: 0.000008 loss: 0.101 loss(reduced): 0.152 grad_norm: 0.06 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 32172.0 tgs: 270 data_time: 0.53s time: 118.89s eta: 4:45:19 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 08:01:46][INFO] [Train] (Epoch 1) Step 451/593 lr: 0.000008 loss: 0.267 loss(reduced): 0.164 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 31588.0 tgs: 265 data_time: 0.88s time: 119.09s eta: 4:43:49 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 08:03:46][INFO] [Train] (Epoch 1) Step 452/593 lr: 0.000008 loss: 0.127 loss(reduced): 0.155 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 26989.0 tgs: 224 data_time: 0.81s time: 119.98s eta: 4:43:56 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 08:05:45][INFO] [Train] (Epoch 1) Step 453/593 lr: 0.000008 loss: 0.152 loss(reduced): 0.149 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.0GB text_tokens: 31693.0 tgs: 266 data_time: 0.69s time: 119.15s eta: 4:39:59 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 08:07:44][INFO] [Train] (Epoch 1) Step 454/593 lr: 0.000008 loss: 0.156 loss(reduced): 0.151 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 32133.0 tgs: 269 data_time: 0.52s time: 119.02s eta: 4:37:42 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 08:09:43][INFO] [Train] (Epoch 1) Step 455/593 lr: 0.000008 loss: 0.148 loss(reduced): 0.156 grad_norm: 0.08 if_nan_skip: 0 max_memory: 40.5GB text_tokens: 30663.0 tgs: 257 data_time: 0.67s time: 119.05s eta: 4:35:48 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 08:11:43][INFO] [Train] (Epoch 1) Step 456/593 lr: 0.000008 loss: 0.126 loss(reduced): 0.148 grad_norm: 0.08 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 32067.0 tgs: 268 data_time: 0.76s time: 119.37s eta: 4:34:32 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 08:13:42][INFO] [Train] (Epoch 1) Step 457/593 lr: 0.000008 loss: 0.266 loss(reduced): 0.154 
grad_norm: 0.08 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 31558.0 tgs: 264 data_time: 0.65s time: 119.41s eta: 4:32:38 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 08:15:41][INFO] [Train] (Epoch 1) Step 458/593 lr: 0.000008 loss: 0.138 loss(reduced): 0.151 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 31203.0 tgs: 262 data_time: 0.93s time: 118.93s eta: 4:29:34 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 08:17:41][INFO] [Train] (Epoch 1) Step 459/593 lr: 0.000008 loss: 0.136 loss(reduced): 0.153 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 32454.0 tgs: 271 data_time: 0.77s time: 119.62s eta: 4:29:09 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 08:19:40][INFO] [Train] (Epoch 1) Step 460/593 lr: 0.000008 loss: 0.109 loss(reduced): 0.155 grad_norm: 0.09 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 32232.0 tgs: 270 data_time: 0.84s time: 119.21s eta: 4:26:14 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 08:21:39][INFO] [Train] (Epoch 1) Step 461/593 lr: 0.000008 loss: 0.146 loss(reduced): 0.157 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.2GB text_tokens: 31898.0 tgs: 267 data_time: 0.75s time: 119.07s eta: 4:23:56 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 08:23:38][INFO] [Train] (Epoch 1) Step 462/593 lr: 0.000008 loss: 0.171 loss(reduced): 0.154 grad_norm: 0.09 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 29983.0 tgs: 252 data_time: 0.75s time: 118.84s eta: 4:21:27 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 08:25:37][INFO] [Train] (Epoch 1) Step 463/593 lr: 0.000008 loss: 0.128 loss(reduced): 0.153 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 32306.0 tgs: 272 data_time: 0.89s time: 118.76s eta: 4:19:17 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 08:27:36][INFO] [Train] (Epoch 1) Step 464/593 lr: 0.000008 loss: 0.121 loss(reduced): 0.160 grad_norm: 0.09 if_nan_skip: 0 max_memory: 40.9GB text_tokens: 30630.0 tgs: 256 data_time: 0.48s time: 119.43s eta: 4:18:45 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 08:29:36][INFO] [Train] (Epoch 1) Step 465/593 lr: 0.000008 loss: 0.193 loss(reduced): 0.155 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 32396.0 tgs: 271 data_time: 0.74s time: 119.40s eta: 4:16:42 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 08:31:35][INFO] [Train] (Epoch 1) Step 466/593 lr: 0.000008 loss: 0.193 loss(reduced): 0.152 grad_norm: 0.08 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 32099.0 tgs: 268 data_time: 0.64s time: 119.35s eta: 4:14:37 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 08:33:34][INFO] [Train] (Epoch 1) Step 467/593 lr: 0.000008 loss: 0.124 loss(reduced): 0.150 grad_norm: 0.06 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 30656.0 tgs: 257 data_time: 0.79s time: 119.05s eta: 4:11:59 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 08:35:33][INFO] [Train] (Epoch 1) Step 468/593 lr: 0.000008 loss: 0.224 loss(reduced): 0.152 grad_norm: 0.08 if_nan_skip: 0 max_memory: 41.1GB text_tokens: 31802.0 tgs: 266 data_time: 0.78s time: 119.12s eta: 4:10:09 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 08:37:32][INFO] [Train] (Epoch 1) Step 469/593 lr: 0.000008 loss: 0.124 loss(reduced): 0.155 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.1GB text_tokens: 31424.0 tgs: 263 data_time: 0.85s time: 119.13s eta: 4:08:11 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 08:39:31][INFO] [Train] (Epoch 1) Step 470/593 lr: 0.000008 loss: 0.229 loss(reduced): 0.151 grad_norm: 0.08 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 31592.0 tgs: 265 data_time: 0.60s time: 118.95s eta: 
4:05:50 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 08:41:30][INFO] [Train] (Epoch 1) Step 471/593 lr: 0.000008 loss: 0.139 loss(reduced): 0.153 grad_norm: 0.08 if_nan_skip: 0 max_memory: 41.2GB text_tokens: 29607.0 tgs: 248 data_time: 0.75s time: 119.15s eta: 4:04:15 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 08:43:30][INFO] [Train] (Epoch 1) Step 472/593 lr: 0.000007 loss: 0.171 loss(reduced): 0.151 grad_norm: 0.06 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 31847.0 tgs: 266 data_time: 0.65s time: 119.50s eta: 4:02:59 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 08:45:29][INFO] [Train] (Epoch 1) Step 473/593 lr: 0.000007 loss: 0.125 loss(reduced): 0.161 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.2GB text_tokens: 31185.0 tgs: 261 data_time: 0.73s time: 119.24s eta: 4:00:27 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 08:47:28][INFO] [Train] (Epoch 1) Step 474/593 lr: 0.000007 loss: 0.130 loss(reduced): 0.154 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 32303.0 tgs: 271 data_time: 1.07s time: 119.19s eta: 3:58:23 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 08:49:28][INFO] [Train] (Epoch 1) Step 475/593 lr: 0.000007 loss: 0.165 loss(reduced): 0.147 grad_norm: 0.08 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 31655.0 tgs: 265 data_time: 0.85s time: 119.36s eta: 3:56:43 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 08:51:27][INFO] [Train] (Epoch 1) Step 476/593 lr: 0.000007 loss: 0.183 loss(reduced): 0.153 grad_norm: 0.06 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 31893.0 tgs: 267 data_time: 0.74s time: 119.37s eta: 3:54:45 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 08:53:27][INFO] [Train] (Epoch 1) Step 477/593 lr: 0.000007 loss: 0.158 loss(reduced): 0.152 grad_norm: 0.09 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 32035.0 tgs: 267 data_time: 0.71s time: 119.95s eta: 3:53:54 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 08:55:26][INFO] [Train] (Epoch 1) Step 478/593 lr: 0.000007 loss: 0.121 loss(reduced): 0.151 grad_norm: 0.06 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 32335.0 tgs: 270 data_time: 0.91s time: 119.33s eta: 3:50:41 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 08:57:26][INFO] [Train] (Epoch 1) Step 479/593 lr: 0.000007 loss: 0.121 loss(reduced): 0.147 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 32258.0 tgs: 269 data_time: 0.69s time: 119.63s eta: 3:49:17 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 08:59:25][INFO] [Train] (Epoch 1) Step 480/593 lr: 0.000007 loss: 0.163 loss(reduced): 0.153 grad_norm: 0.06 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 31736.0 tgs: 267 data_time: 0.92s time: 118.85s eta: 3:45:48 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 09:01:25][INFO] [Train] (Epoch 1) Step 481/593 lr: 0.000007 loss: 0.155 loss(reduced): 0.149 grad_norm: 0.06 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 32385.0 tgs: 270 data_time: 0.74s time: 119.76s eta: 3:45:33 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 09:03:24][INFO] [Train] (Epoch 1) Step 482/593 lr: 0.000007 loss: 0.145 loss(reduced): 0.153 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 31591.0 tgs: 264 data_time: 0.79s time: 119.33s eta: 3:42:45 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 09:05:23][INFO] [Train] (Epoch 1) Step 483/593 lr: 0.000007 loss: 0.150 loss(reduced): 0.152 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 32056.0 tgs: 268 data_time: 0.66s time: 119.25s eta: 3:40:37 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 09:07:22][INFO] [Train] (Epoch 1) Step 484/593 lr: 0.000007 loss: 
0.128 loss(reduced): 0.150 grad_norm: 0.06 if_nan_skip: 0 max_memory: 41.1GB text_tokens: 31575.0 tgs: 265 data_time: 0.66s time: 118.98s eta: 3:38:08 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 09:09:22][INFO] [Train] (Epoch 1) Step 485/593 lr: 0.000007 loss: 0.147 loss(reduced): 0.152 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 31737.0 tgs: 265 data_time: 0.64s time: 119.47s eta: 3:37:02 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 09:11:21][INFO] [Train] (Epoch 1) Step 486/593 lr: 0.000007 loss: 0.163 loss(reduced): 0.152 grad_norm: 0.06 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 32205.0 tgs: 270 data_time: 0.72s time: 119.00s eta: 3:34:12 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 09:13:20][INFO] [Train] (Epoch 1) Step 487/593 lr: 0.000007 loss: 0.110 loss(reduced): 0.151 grad_norm: 0.06 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 31281.0 tgs: 262 data_time: 0.71s time: 119.01s eta: 3:32:14 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 09:15:19][INFO] [Train] (Epoch 1) Step 488/593 lr: 0.000007 loss: 0.108 loss(reduced): 0.152 grad_norm: 0.06 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 31881.0 tgs: 267 data_time: 0.68s time: 119.20s eta: 3:30:34 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 09:17:19][INFO] [Train] (Epoch 1) Step 489/593 lr: 0.000007 loss: 0.154 loss(reduced): 0.151 grad_norm: 0.06 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 31848.0 tgs: 265 data_time: 0.64s time: 120.12s eta: 3:30:12 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 09:19:18][INFO] [Train] (Epoch 1) Step 490/593 lr: 0.000007 loss: 0.215 loss(reduced): 0.156 grad_norm: 0.06 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 32136.0 tgs: 269 data_time: 1.02s time: 119.12s eta: 3:26:28 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 09:21:18][INFO] [Train] (Epoch 1) Step 491/593 lr: 0.000007 loss: 0.180 loss(reduced): 0.151 grad_norm: 0.06 if_nan_skip: 0 max_memory: 41.1GB text_tokens: 31086.0 tgs: 260 data_time: 0.71s time: 119.32s eta: 3:24:49 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 09:23:17][INFO] [Train] (Epoch 1) Step 492/593 lr: 0.000007 loss: 0.198 loss(reduced): 0.148 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.1GB text_tokens: 31156.0 tgs: 261 data_time: 0.74s time: 119.07s eta: 3:22:25 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 09:25:16][INFO] [Train] (Epoch 1) Step 493/593 lr: 0.000007 loss: 0.118 loss(reduced): 0.159 grad_norm: 0.06 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 31024.0 tgs: 259 data_time: 1.05s time: 119.43s eta: 3:21:02 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 09:27:15][INFO] [Train] (Epoch 1) Step 494/593 lr: 0.000007 loss: 0.127 loss(reduced): 0.155 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 31123.0 tgs: 260 data_time: 1.07s time: 119.28s eta: 3:18:48 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 09:29:15][INFO] [Train] (Epoch 1) Step 495/593 lr: 0.000007 loss: 0.156 loss(reduced): 0.153 grad_norm: 0.06 if_nan_skip: 0 max_memory: 41.0GB text_tokens: 31449.0 tgs: 263 data_time: 0.74s time: 119.22s eta: 3:16:42 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 09:31:14][INFO] [Train] (Epoch 1) Step 496/593 lr: 0.000007 loss: 0.149 loss(reduced): 0.151 grad_norm: 0.06 if_nan_skip: 0 max_memory: 41.2GB text_tokens: 32068.0 tgs: 269 data_time: 0.88s time: 119.15s eta: 3:14:37 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 09:33:13][INFO] [Train] (Epoch 1) Step 497/593 lr: 0.000007 loss: 0.152 loss(reduced): 0.152 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 31033.0 tgs: 260 data_time: 
0.69s time: 119.02s eta: 3:12:24 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 09:35:12][INFO] [Train] (Epoch 1) Step 498/593 lr: 0.000007 loss: 0.156 loss(reduced): 0.150 grad_norm: 0.06 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 32064.0 tgs: 268 data_time: 0.84s time: 119.21s eta: 3:10:44 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 09:37:15][INFO] [Train] (Epoch 1) Step 499/593 lr: 0.000007 loss: 0.161 loss(reduced): 0.154 grad_norm: 0.06 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 31295.0 tgs: 260 data_time: 0.79s time: 120.15s eta: 3:10:14 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 09:39:14][INFO] [Train] (Epoch 1) Step 500/593 lr: 0.000007 loss: 0.180 loss(reduced): 0.155 grad_norm: 0.06 if_nan_skip: 0 max_memory: 41.2GB text_tokens: 31920.0 tgs: 267 data_time: 0.67s time: 119.50s eta: 3:07:12 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 09:41:13][INFO] [Train] (Epoch 1) Step 501/593 lr: 0.000007 loss: 0.102 loss(reduced): 0.149 grad_norm: 0.06 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 31835.0 tgs: 266 data_time: 0.63s time: 119.26s eta: 3:04:51 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 09:43:12][INFO] [Train] (Epoch 1) Step 502/593 lr: 0.000007 loss: 0.193 loss(reduced): 0.152 grad_norm: 0.08 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 31950.0 tgs: 268 data_time: 0.61s time: 118.99s eta: 3:02:26 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 09:45:12][INFO] [Train] (Epoch 1) Step 503/593 lr: 0.000007 loss: 0.184 loss(reduced): 0.147 grad_norm: 0.06 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 31800.0 tgs: 266 data_time: 1.00s time: 119.22s eta: 3:00:48 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 09:47:11][INFO] [Train] (Epoch 1) Step 504/593 lr: 0.000007 loss: 0.126 loss(reduced): 0.158 grad_norm: 0.08 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 31771.0 tgs: 265 data_time: 0.81s time: 119.46s eta: 2:59:11 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 09:49:10][INFO] [Train] (Epoch 1) Step 505/593 lr: 0.000007 loss: 0.156 loss(reduced): 0.153 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 32468.0 tgs: 272 data_time: 0.84s time: 118.98s eta: 2:56:29 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 09:51:10][INFO] [Train] (Epoch 1) Step 506/593 lr: 0.000007 loss: 0.123 loss(reduced): 0.154 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 30769.0 tgs: 257 data_time: 0.83s time: 119.35s eta: 2:55:02 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 09:53:09][INFO] [Train] (Epoch 1) Step 507/593 lr: 0.000007 loss: 0.135 loss(reduced): 0.146 grad_norm: 0.08 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 32333.0 tgs: 270 data_time: 0.80s time: 119.41s eta: 2:53:08 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 09:55:08][INFO] [Train] (Epoch 1) Step 508/593 lr: 0.000007 loss: 0.179 loss(reduced): 0.152 grad_norm: 0.06 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 32040.0 tgs: 268 data_time: 0.92s time: 119.38s eta: 2:51:06 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 09:57:10][INFO] [Train] (Epoch 1) Step 509/593 lr: 0.000007 loss: 0.155 loss(reduced): 0.155 grad_norm: 0.08 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 31822.0 tgs: 262 data_time: 0.61s time: 121.45s eta: 2:52:02 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 09:59:09][INFO] [Train] (Epoch 1) Step 510/593 lr: 0.000007 loss: 0.137 loss(reduced): 0.149 grad_norm: 0.06 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 32253.0 tgs: 270 data_time: 0.99s time: 119.22s eta: 2:46:54 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 10:01:09][INFO] [Train] (Epoch 1) Step 
511/593 lr: 0.000007 loss: 0.181 loss(reduced): 0.148 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 32500.0 tgs: 271 data_time: 0.66s time: 119.60s eta: 2:45:26 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 10:03:08][INFO] [Train] (Epoch 1) Step 512/593 lr: 0.000007 loss: 0.154 loss(reduced): 0.150 grad_norm: 0.06 if_nan_skip: 0 max_memory: 41.2GB text_tokens: 31322.0 tgs: 263 data_time: 0.84s time: 118.92s eta: 2:42:31 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 10:05:07][INFO] [Train] (Epoch 1) Step 513/593 lr: 0.000007 loss: 0.124 loss(reduced): 0.151 grad_norm: 0.08 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 32042.0 tgs: 268 data_time: 0.84s time: 119.18s eta: 2:40:53 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 10:07:06][INFO] [Train] (Epoch 1) Step 514/593 lr: 0.000007 loss: 0.134 loss(reduced): 0.146 grad_norm: 0.06 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 30370.0 tgs: 255 data_time: 0.59s time: 118.97s eta: 2:38:37 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 10:09:05][INFO] [Train] (Epoch 1) Step 515/593 lr: 0.000007 loss: 0.140 loss(reduced): 0.155 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.2GB text_tokens: 32011.0 tgs: 268 data_time: 0.95s time: 119.23s eta: 2:36:59 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 10:11:04][INFO] [Train] (Epoch 1) Step 516/593 lr: 0.000007 loss: 0.122 loss(reduced): 0.152 grad_norm: 0.06 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 31095.0 tgs: 261 data_time: 0.78s time: 119.10s eta: 2:34:49 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 10:13:04][INFO] [Train] (Epoch 1) Step 517/593 lr: 0.000007 loss: 0.132 loss(reduced): 0.143 grad_norm: 0.06 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 31893.0 tgs: 267 data_time: 0.61s time: 119.43s eta: 2:33:16 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 10:15:04][INFO] [Train] (Epoch 1) Step 518/593 lr: 0.000007 loss: 0.169 loss(reduced): 0.154 grad_norm: 0.06 if_nan_skip: 0 max_memory: 41.2GB text_tokens: 31005.0 tgs: 257 data_time: 0.53s time: 120.25s eta: 2:32:19 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 10:17:03][INFO] [Train] (Epoch 1) Step 519/593 lr: 0.000007 loss: 0.169 loss(reduced): 0.158 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 32038.0 tgs: 268 data_time: 0.93s time: 119.25s eta: 2:29:03 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 10:19:02][INFO] [Train] (Epoch 1) Step 520/593 lr: 0.000007 loss: 0.160 loss(reduced): 0.148 grad_norm: 0.06 if_nan_skip: 0 max_memory: 41.0GB text_tokens: 30817.0 tgs: 259 data_time: 0.96s time: 118.88s eta: 2:26:37 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 10:21:01][INFO] [Train] (Epoch 1) Step 521/593 lr: 0.000007 loss: 0.145 loss(reduced): 0.154 grad_norm: 0.08 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 30782.0 tgs: 257 data_time: 0.67s time: 119.51s eta: 2:25:24 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 10:23:01][INFO] [Train] (Epoch 1) Step 522/593 lr: 0.000007 loss: 0.158 loss(reduced): 0.150 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.2GB text_tokens: 31877.0 tgs: 267 data_time: 0.80s time: 119.09s eta: 2:22:54 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 10:25:00][INFO] [Train] (Epoch 1) Step 523/593 lr: 0.000007 loss: 0.202 loss(reduced): 0.150 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 31772.0 tgs: 266 data_time: 0.48s time: 119.13s eta: 2:20:58 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 10:26:59][INFO] [Train] (Epoch 1) Step 524/593 lr: 0.000006 loss: 0.145 loss(reduced): 0.151 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 
32044.0 tgs: 269 data_time: 0.82s time: 119.07s eta: 2:18:54 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 10:28:59][INFO] [Train] (Epoch 1) Step 525/593 lr: 0.000006 loss: 0.146 loss(reduced): 0.152 grad_norm: 0.06 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 31743.0 tgs: 264 data_time: 0.80s time: 119.82s eta: 2:17:47 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 10:30:58][INFO] [Train] (Epoch 1) Step 526/593 lr: 0.000006 loss: 0.119 loss(reduced): 0.159 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 30998.0 tgs: 259 data_time: 0.60s time: 119.34s eta: 2:15:14 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 10:32:57][INFO] [Train] (Epoch 1) Step 527/593 lr: 0.000006 loss: 0.130 loss(reduced): 0.144 grad_norm: 0.06 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 31692.0 tgs: 265 data_time: 0.75s time: 119.39s eta: 2:13:18 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 10:34:57][INFO] [Train] (Epoch 1) Step 528/593 lr: 0.000006 loss: 0.126 loss(reduced): 0.152 grad_norm: 0.07 if_nan_skip: 0 max_memory: 40.8GB text_tokens: 30177.0 tgs: 252 data_time: 0.78s time: 119.67s eta: 2:11:38 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 10:36:56][INFO] [Train] (Epoch 1) Step 529/593 lr: 0.000006 loss: 0.147 loss(reduced): 0.147 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 32035.0 tgs: 268 data_time: 0.75s time: 119.49s eta: 2:09:27 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 10:38:55][INFO] [Train] (Epoch 1) Step 530/593 lr: 0.000006 loss: 0.125 loss(reduced): 0.141 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 31583.0 tgs: 265 data_time: 0.65s time: 118.96s eta: 2:06:53 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 10:40:55][INFO] [Train] (Epoch 1) Step 531/593 lr: 0.000006 loss: 0.139 loss(reduced): 0.155 grad_norm: 0.07 if_nan_skip: 0 max_memory: 40.9GB text_tokens: 30341.0 tgs: 254 data_time: 0.80s time: 119.37s eta: 2:05:20 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 10:42:54][INFO] [Train] (Epoch 1) Step 532/593 lr: 0.000006 loss: 0.131 loss(reduced): 0.149 grad_norm: 0.06 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 31791.0 tgs: 266 data_time: 0.62s time: 119.30s eta: 2:03:16 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 10:44:53][INFO] [Train] (Epoch 1) Step 533/593 lr: 0.000006 loss: 0.167 loss(reduced): 0.153 grad_norm: 0.06 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 31326.0 tgs: 262 data_time: 0.60s time: 119.19s eta: 2:01:10 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 10:46:53][INFO] [Train] (Epoch 1) Step 534/593 lr: 0.000006 loss: 0.132 loss(reduced): 0.152 grad_norm: 0.06 if_nan_skip: 0 max_memory: 41.2GB text_tokens: 31955.0 tgs: 267 data_time: 0.80s time: 119.51s eta: 1:59:30 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 10:48:52][INFO] [Train] (Epoch 1) Step 535/593 lr: 0.000006 loss: 0.118 loss(reduced): 0.141 grad_norm: 0.06 if_nan_skip: 0 max_memory: 41.2GB text_tokens: 31838.0 tgs: 266 data_time: 1.31s time: 119.42s eta: 1:57:25 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 10:50:52][INFO] [Train] (Epoch 1) Step 536/593 lr: 0.000006 loss: 0.169 loss(reduced): 0.153 grad_norm: 0.06 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 32512.0 tgs: 271 data_time: 0.95s time: 119.56s eta: 1:55:34 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 10:52:51][INFO] [Train] (Epoch 1) Step 537/593 lr: 0.000006 loss: 0.215 loss(reduced): 0.152 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 32043.0 tgs: 268 data_time: 0.61s time: 119.28s eta: 1:53:18 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 
10:54:50][INFO] [Train] (Epoch 1) Step 538/593 lr: 0.000006 loss: 0.133 loss(reduced): 0.152 grad_norm: 0.06 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 31402.0 tgs: 263 data_time: 0.70s time: 119.24s eta: 1:51:17 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 10:56:49][INFO] [Train] (Epoch 1) Step 539/593 lr: 0.000006 loss: 0.135 loss(reduced): 0.151 grad_norm: 0.06 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 31400.0 tgs: 264 data_time: 0.79s time: 118.93s eta: 1:49:01 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 10:58:49][INFO] [Train] (Epoch 1) Step 540/593 lr: 0.000006 loss: 0.130 loss(reduced): 0.145 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 31011.0 tgs: 259 data_time: 0.61s time: 119.65s eta: 1:47:40 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 11:00:49][INFO] [Train] (Epoch 1) Step 541/593 lr: 0.000006 loss: 0.111 loss(reduced): 0.149 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 31568.0 tgs: 263 data_time: 0.62s time: 119.73s eta: 1:45:45 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 11:02:48][INFO] [Train] (Epoch 1) Step 542/593 lr: 0.000006 loss: 0.165 loss(reduced): 0.151 grad_norm: 0.08 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 32268.0 tgs: 270 data_time: 0.82s time: 119.35s eta: 1:43:25 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 11:04:47][INFO] [Train] (Epoch 1) Step 543/593 lr: 0.000006 loss: 0.141 loss(reduced): 0.156 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.0GB text_tokens: 31370.0 tgs: 262 data_time: 0.56s time: 119.30s eta: 1:41:24 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 11:06:47][INFO] [Train] (Epoch 1) Step 544/593 lr: 0.000006 loss: 0.159 loss(reduced): 0.150 grad_norm: 0.06 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 31550.0 tgs: 264 data_time: 0.58s time: 119.28s eta: 1:39:23 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 11:08:46][INFO] [Train] (Epoch 1) Step 545/593 lr: 0.000006 loss: 0.114 loss(reduced): 0.145 grad_norm: 0.06 if_nan_skip: 0 max_memory: 41.0GB text_tokens: 30490.0 tgs: 256 data_time: 0.62s time: 118.90s eta: 1:37:06 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 11:10:45][INFO] [Train] (Epoch 1) Step 546/593 lr: 0.000006 loss: 0.195 loss(reduced): 0.155 grad_norm: 0.06 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 31994.0 tgs: 268 data_time: 0.75s time: 119.16s eta: 1:35:19 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 11:12:44][INFO] [Train] (Epoch 1) Step 547/593 lr: 0.000006 loss: 0.152 loss(reduced): 0.151 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.1GB text_tokens: 31848.0 tgs: 267 data_time: 0.80s time: 119.06s eta: 1:33:15 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 11:14:43][INFO] [Train] (Epoch 1) Step 548/593 lr: 0.000006 loss: 0.180 loss(reduced): 0.152 grad_norm: 0.06 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 31235.0 tgs: 261 data_time: 0.92s time: 119.27s eta: 1:31:26 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 11:16:43][INFO] [Train] (Epoch 1) Step 549/593 lr: 0.000006 loss: 0.116 loss(reduced): 0.151 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 32418.0 tgs: 270 data_time: 0.57s time: 119.71s eta: 1:29:46 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 11:18:42][INFO] [Train] (Epoch 1) Step 550/593 lr: 0.000006 loss: 0.139 loss(reduced): 0.150 grad_norm: 0.06 if_nan_skip: 0 max_memory: 40.9GB text_tokens: 31213.0 tgs: 262 data_time: 0.79s time: 119.04s eta: 1:27:17 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 11:20:41][INFO] [Train] (Epoch 1) Step 551/593 lr: 0.000006 loss: 0.169 loss(reduced): 0.143 grad_norm: 0.07 if_nan_skip: 
0 max_memory: 41.2GB text_tokens: 31577.0 tgs: 265 data_time: 0.75s time: 118.85s eta: 1:25:10 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 11:22:40][INFO] [Train] (Epoch 1) Step 552/593 lr: 0.000006 loss: 0.133 loss(reduced): 0.151 grad_norm: 0.06 if_nan_skip: 0 max_memory: 41.2GB text_tokens: 30653.0 tgs: 256 data_time: 0.52s time: 119.47s eta: 1:23:37 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 11:24:40][INFO] [Train] (Epoch 1) Step 553/593 lr: 0.000006 loss: 0.111 loss(reduced): 0.156 grad_norm: 0.06 if_nan_skip: 0 max_memory: 40.9GB text_tokens: 30599.0 tgs: 256 data_time: 0.58s time: 119.48s eta: 1:21:38 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 11:26:39][INFO] [Train] (Epoch 1) Step 554/593 lr: 0.000006 loss: 0.122 loss(reduced): 0.146 grad_norm: 0.06 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 29956.0 tgs: 251 data_time: 0.44s time: 118.94s eta: 1:19:17 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 11:28:38][INFO] [Train] (Epoch 1) Step 555/593 lr: 0.000006 loss: 0.162 loss(reduced): 0.149 grad_norm: 0.06 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 31890.0 tgs: 268 data_time: 1.04s time: 118.93s eta: 1:17:18 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 11:30:40][INFO] [Train] (Epoch 1) Step 556/593 lr: 0.000006 loss: 0.180 loss(reduced): 0.150 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.2GB text_tokens: 31661.0 tgs: 259 data_time: 0.79s time: 122.16s eta: 1:17:22 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 11:32:39][INFO] [Train] (Epoch 1) Step 557/593 lr: 0.000006 loss: 0.131 loss(reduced): 0.148 grad_norm: 0.06 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 30022.0 tgs: 251 data_time: 0.78s time: 119.36s eta: 1:13:36 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 11:34:38][INFO] [Train] (Epoch 1) Step 558/593 lr: 0.000006 loss: 0.173 loss(reduced): 0.152 grad_norm: 0.06 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 31854.0 tgs: 267 data_time: 0.79s time: 119.05s eta: 1:11:25 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 11:36:37][INFO] [Train] (Epoch 1) Step 559/593 lr: 0.000006 loss: 0.127 loss(reduced): 0.144 grad_norm: 0.06 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 31980.0 tgs: 268 data_time: 0.70s time: 118.93s eta: 1:09:22 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 11:38:36][INFO] [Train] (Epoch 1) Step 560/593 lr: 0.000006 loss: 0.134 loss(reduced): 0.153 grad_norm: 0.06 if_nan_skip: 0 max_memory: 41.2GB text_tokens: 31418.0 tgs: 263 data_time: 0.76s time: 119.10s eta: 1:07:29 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 11:40:36][INFO] [Train] (Epoch 1) Step 561/593 lr: 0.000006 loss: 0.111 loss(reduced): 0.154 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.2GB text_tokens: 31785.0 tgs: 266 data_time: 0.58s time: 119.41s eta: 1:05:40 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 11:42:35][INFO] [Train] (Epoch 1) Step 562/593 lr: 0.000006 loss: 0.127 loss(reduced): 0.152 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 30935.0 tgs: 259 data_time: 0.73s time: 119.34s eta: 1:03:38 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 11:44:34][INFO] [Train] (Epoch 1) Step 563/593 lr: 0.000006 loss: 0.168 loss(reduced): 0.152 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 32103.0 tgs: 269 data_time: 0.66s time: 119.18s eta: 1:01:34 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 11:46:33][INFO] [Train] (Epoch 1) Step 564/593 lr: 0.000006 loss: 0.107 loss(reduced): 0.151 grad_norm: 0.06 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 30987.0 tgs: 259 data_time: 0.89s time: 119.30s eta: 0:59:39 +[XTuner][RANK 0][DP 0][SP 
0][TP 0][2025-01-22 11:48:33][INFO] [Train] (Epoch 1) Step 565/593 lr: 0.000006 loss: 0.138 loss(reduced): 0.149 grad_norm: 0.06 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 32080.0 tgs: 268 data_time: 0.76s time: 119.41s eta: 0:57:42 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 11:50:32][INFO] [Train] (Epoch 1) Step 566/593 lr: 0.000006 loss: 0.138 loss(reduced): 0.149 grad_norm: 0.06 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 31912.0 tgs: 267 data_time: 0.66s time: 119.39s eta: 0:55:43 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 11:52:32][INFO] [Train] (Epoch 1) Step 567/593 lr: 0.000006 loss: 0.134 loss(reduced): 0.155 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 32124.0 tgs: 268 data_time: 0.80s time: 119.49s eta: 0:53:46 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 11:54:31][INFO] [Train] (Epoch 1) Step 568/593 lr: 0.000006 loss: 0.172 loss(reduced): 0.145 grad_norm: 0.06 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 32276.0 tgs: 270 data_time: 0.67s time: 119.36s eta: 0:51:43 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 11:56:31][INFO] [Train] (Epoch 1) Step 569/593 lr: 0.000006 loss: 0.166 loss(reduced): 0.161 grad_norm: 0.08 if_nan_skip: 0 max_memory: 41.2GB text_tokens: 30896.0 tgs: 258 data_time: 0.74s time: 119.46s eta: 0:49:46 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 11:58:30][INFO] [Train] (Epoch 1) Step 570/593 lr: 0.000006 loss: 0.128 loss(reduced): 0.151 grad_norm: 0.06 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 31057.0 tgs: 261 data_time: 0.76s time: 118.89s eta: 0:47:33 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 12:00:29][INFO] [Train] (Epoch 1) Step 571/593 lr: 0.000006 loss: 0.120 loss(reduced): 0.143 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 31820.0 tgs: 266 data_time: 0.67s time: 119.29s eta: 0:45:43 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 12:02:28][INFO] [Train] (Epoch 1) Step 572/593 lr: 0.000006 loss: 0.141 loss(reduced): 0.151 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 32370.0 tgs: 271 data_time: 0.86s time: 119.22s eta: 0:43:42 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 12:04:27][INFO] [Train] (Epoch 1) Step 573/593 lr: 0.000006 loss: 0.148 loss(reduced): 0.150 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.2GB text_tokens: 30898.0 tgs: 259 data_time: 0.62s time: 118.98s eta: 0:41:38 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 12:06:26][INFO] [Train] (Epoch 1) Step 574/593 lr: 0.000006 loss: 0.160 loss(reduced): 0.150 grad_norm: 0.06 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 32489.0 tgs: 272 data_time: 0.76s time: 119.04s eta: 0:39:40 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 12:08:26][INFO] [Train] (Epoch 1) Step 575/593 lr: 0.000006 loss: 0.196 loss(reduced): 0.154 grad_norm: 0.08 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 32249.0 tgs: 269 data_time: 0.81s time: 119.66s eta: 0:37:53 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 12:10:25][INFO] [Train] (Epoch 1) Step 576/593 lr: 0.000006 loss: 0.138 loss(reduced): 0.145 grad_norm: 0.08 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 31896.0 tgs: 267 data_time: 0.76s time: 119.28s eta: 0:35:47 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 12:12:25][INFO] [Train] (Epoch 1) Step 577/593 lr: 0.000006 loss: 0.100 loss(reduced): 0.144 grad_norm: 0.06 if_nan_skip: 0 max_memory: 41.2GB text_tokens: 30802.0 tgs: 257 data_time: 0.65s time: 119.75s eta: 0:33:55 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 12:14:26][INFO] [Train] (Epoch 1) Step 578/593 lr: 0.000006 loss: 0.122 loss(reduced): 0.153 
grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.1GB text_tokens: 31377.0 tgs: 258 data_time: 0.57s time: 121.22s eta: 0:32:19 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 12:16:25][INFO] [Train] (Epoch 1) Step 579/593 lr: 0.000006 loss: 0.189 loss(reduced): 0.146 grad_norm: 0.06 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 32054.0 tgs: 268 data_time: 0.88s time: 119.41s eta: 0:29:51 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 12:18:25][INFO] [Train] (Epoch 1) Step 580/593 lr: 0.000006 loss: 0.126 loss(reduced): 0.153 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 31359.0 tgs: 262 data_time: 0.81s time: 119.29s eta: 0:27:50 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 12:20:24][INFO] [Train] (Epoch 1) Step 581/593 lr: 0.000006 loss: 0.117 loss(reduced): 0.143 grad_norm: 0.06 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 32186.0 tgs: 269 data_time: 0.63s time: 119.63s eta: 0:25:55 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 12:22:24][INFO] [Train] (Epoch 1) Step 582/593 lr: 0.000006 loss: 0.143 loss(reduced): 0.148 grad_norm: 0.06 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 31417.0 tgs: 263 data_time: 0.64s time: 119.09s eta: 0:23:49 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 12:24:23][INFO] [Train] (Epoch 1) Step 583/593 lr: 0.000006 loss: 0.137 loss(reduced): 0.148 grad_norm: 0.06 if_nan_skip: 0 max_memory: 41.2GB text_tokens: 31237.0 tgs: 261 data_time: 0.97s time: 119.38s eta: 0:21:53 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 12:26:22][INFO] [Train] (Epoch 1) Step 584/593 lr: 0.000006 loss: 0.154 loss(reduced): 0.155 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.5GB text_tokens: 32584.0 tgs: 273 data_time: 0.68s time: 119.26s eta: 0:19:52 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 12:28:21][INFO] [Train] (Epoch 1) Step 585/593 lr: 0.000006 loss: 0.175 loss(reduced): 0.146 grad_norm: 0.06 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 31509.0 tgs: 264 data_time: 0.91s time: 119.24s eta: 0:17:53 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 12:30:21][INFO] [Train] (Epoch 1) Step 586/593 lr: 0.000006 loss: 0.203 loss(reduced): 0.144 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 32427.0 tgs: 270 data_time: 0.80s time: 119.91s eta: 0:15:59 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 12:32:21][INFO] [Train] (Epoch 1) Step 587/593 lr: 0.000006 loss: 0.143 loss(reduced): 0.150 grad_norm: 0.06 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 31335.0 tgs: 262 data_time: 0.81s time: 119.22s eta: 0:13:54 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 12:34:20][INFO] [Train] (Epoch 1) Step 588/593 lr: 0.000006 loss: 0.187 loss(reduced): 0.158 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 31523.0 tgs: 264 data_time: 0.70s time: 119.09s eta: 0:11:54 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 12:36:19][INFO] [Train] (Epoch 1) Step 589/593 lr: 0.000006 loss: 0.114 loss(reduced): 0.150 grad_norm: 0.07 if_nan_skip: 0 max_memory: 41.2GB text_tokens: 32060.0 tgs: 268 data_time: 0.93s time: 119.35s eta: 0:09:56 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 12:38:18][INFO] [Train] (Epoch 1) Step 590/593 lr: 0.000006 loss: 0.102 loss(reduced): 0.149 grad_norm: 0.06 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 31495.0 tgs: 264 data_time: 0.53s time: 119.16s eta: 0:07:56 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 12:40:18][INFO] [Train] (Epoch 1) Step 591/593 lr: 0.000006 loss: 0.118 loss(reduced): 0.152 grad_norm: 0.06 if_nan_skip: 0 max_memory: 41.3GB text_tokens: 32049.0 tgs: 267 data_time: 0.56s time: 119.67s eta: 
0:05:59 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 12:42:21][INFO] [Train] (Epoch 1) Step 592/593 lr: 0.000006 loss: 0.098 loss(reduced): 0.150 grad_norm: 0.06 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 30953.0 tgs: 257 data_time: 0.57s time: 120.04s eta: 0:04:00 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 12:44:20][INFO] [Train] (Epoch 1) Step 593/593 lr: 0.000006 loss: 0.149 loss(reduced): 0.153 grad_norm: 0.06 if_nan_skip: 0 max_memory: 41.4GB text_tokens: 32379.0 tgs: 271 data_time: 0.65s time: 119.21s eta: 0:01:59 +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 12:56:17][SUCCESS] [HF Checkpoint] Elapsed time 717.26 seconds, peak gpu memory 17.4G +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 12:56:17][WARNING] The saved checkpoint cannot be resumed. If you want to save a resumable checkpoint, please remove `--checkpoint-drop-optimizer` from the command. +[XTuner][RANK 0][DP 0][SP 0][TP 0][2025-01-22 12:56:17][INFO] [Train] Cost 19:54:22