Commit 43c833f

remove print
eliebak committed Sep 20, 2024
1 parent 5e8361c · commit 43c833f
Showing 1 changed file with 0 additions and 6 deletions.
6 changes: 0 additions & 6 deletions src/nanotron/trainer.py
@@ -336,10 +336,6 @@ def post_training(self):
         if self.s3_mover is not None:
             self.s3_mover.distributed_wait_for_completion(group=self.parallel_context.world_pg)
 
-    def post_training(self):
-        if self.s3_mover is not None:
-            self.s3_mover.distributed_wait_for_completion(group=self.parallel_context.world_pg)
-
     def _print_training_plan(self):
         if hasattr(self.config, "data_stages") and self.config.data_stages is not None:
             stages_info = "".join(
@@ -927,8 +923,6 @@ def post_save_checkpoint(self):
     def save_checkpoint(self) -> Path:
         self.pre_save_checkpoint()
         checkpoints_path = self.config.checkpoints.checkpoints_path
-        print(f"config: {self.config}")
-        print(f"checkpoints_path: {checkpoints_path}")
         checkpoint_path = Path(checkpoints_path) / f"{self.iteration_step}"
         if self.config.checkpoints.checkpoints_path_is_shared_file_system:
             should_mkdir = dist.get_rank(self.parallel_context.world_pg) == 0
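
The two deleted print calls dumped the entire config and the checkpoint path to stdout on every rank at every save. A minimal sketch of how the same information could instead be surfaced through nanotron's log_rank helper, assuming the logger setup used elsewhere in trainer.py (hypothetical placement, not part of this commit):

    from nanotron import logging
    from nanotron.logging import log_rank

    logger = logging.get_logger(__name__)

    # Emit the checkpoint path once, from rank 0 only, rather than printing on all ranks.
    log_rank(f"Saving checkpoint to {checkpoints_path}", logger=logger, level=logging.INFO, rank=0)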
