
Commit

remove unused tp param
Muhtasham authored Dec 7, 2024
1 parent 7281e0b commit 6dbfb3c
Showing 1 changed file with 0 additions and 2 deletions.
torchtitan/parallelisms/parallelize_llama.py (0 additions, 2 deletions)
```diff
@@ -118,7 +118,6 @@ def parallelize_llama(
         dp_mesh,
         param_dtype=TORCH_DTYPE_MAP[job_config.training.mixed_precision_param],
         reduce_dtype=TORCH_DTYPE_MAP[job_config.training.mixed_precision_reduce],
-        tp_enabled=parallel_dims.tp_enabled,
         pp_enabled=parallel_dims.pp_enabled,
         cpu_offload=job_config.training.enable_cpu_offload,
     )
```
```diff
@@ -338,7 +337,6 @@ def apply_fsdp(
     dp_mesh: DeviceMesh,
     param_dtype: torch.dtype,
     reduce_dtype: torch.dtype,
-    tp_enabled: bool,
     pp_enabled: bool,
     cpu_offload: bool = False,
 ):
```
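For context, here is a minimal sketch of what `apply_fsdp` might reduce to after this commit. Only the signature change comes from the diff above; the `model` parameter and the function body are assumptions added for illustration, written against PyTorch's FSDP2 composable APIs (`fully_shard`, `MixedPrecisionPolicy`, `CPUOffloadPolicy`), which torchtitan builds on.

```python
# Hypothetical sketch of apply_fsdp after this commit, not the actual
# torchtitan implementation. Only the parameter list is taken from the
# diff; the body shows how the remaining parameters are typically fed
# into PyTorch's FSDP2 composable APIs.
import torch
from torch.distributed.device_mesh import DeviceMesh
from torch.distributed._composable.fsdp import (
    CPUOffloadPolicy,
    MixedPrecisionPolicy,
    OffloadPolicy,
    fully_shard,
)


def apply_fsdp(
    model: torch.nn.Module,  # assumed first parameter; not shown in the hunk
    dp_mesh: DeviceMesh,
    param_dtype: torch.dtype,
    reduce_dtype: torch.dtype,
    pp_enabled: bool,  # tp_enabled is gone; nothing below depended on it
    cpu_offload: bool = False,
):
    # Cast parameters and gradient reductions per the configured dtypes.
    mp_policy = MixedPrecisionPolicy(
        param_dtype=param_dtype, reduce_dtype=reduce_dtype
    )
    offload_policy = CPUOffloadPolicy() if cpu_offload else OffloadPolicy()
    fully_shard(
        model,
        mesh=dp_mesh,
        # With pipeline parallelism, resharding after forward is commonly
        # disabled so later microbatches reuse the gathered parameters.
        reshard_after_forward=not pp_enabled,
        mp_policy=mp_policy,
        offload_policy=offload_policy,
    )
```

Per the commit message, `tp_enabled` was unused, so dropping it from both the call site and the signature changes no behavior: FSDP sharding over `dp_mesh` does not branch on whether tensor parallelism is active.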
