From 43c833fdd3a1ebedb384b4f902526632888774c2 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=E2=80=9Celiebak=E2=80=9D?=
Date: Fri, 20 Sep 2024 06:11:30 +0000
Subject: [PATCH] remove print

---
 src/nanotron/trainer.py | 6 ------
 1 file changed, 6 deletions(-)

diff --git a/src/nanotron/trainer.py b/src/nanotron/trainer.py
index 76b8fa4a..2e4be82b 100644
--- a/src/nanotron/trainer.py
+++ b/src/nanotron/trainer.py
@@ -336,10 +336,6 @@ def post_training(self):
         if self.s3_mover is not None:
             self.s3_mover.distributed_wait_for_completion(group=self.parallel_context.world_pg)
 
-    def post_training(self):
-        if self.s3_mover is not None:
-            self.s3_mover.distributed_wait_for_completion(group=self.parallel_context.world_pg)
-
     def _print_training_plan(self):
         if hasattr(self.config, "data_stages") and self.config.data_stages is not None:
             stages_info = "".join(
@@ -927,8 +923,6 @@ def post_save_checkpoint(self):
     def save_checkpoint(self) -> Path:
         self.pre_save_checkpoint()
         checkpoints_path = self.config.checkpoints.checkpoints_path
-        print(f"config: {self.config}")
-        print(f"checkpoints_path: {checkpoints_path}")
         checkpoint_path = Path(checkpoints_path) / f"{self.iteration_step}"
         if self.config.checkpoints.checkpoints_path_is_shared_file_system:
             should_mkdir = dist.get_rank(self.parallel_context.world_pg) == 0