Skip to content

Commit

Permalink
Merge pull request #311 from MannLabs/max-lr-bug
Browse files Browse the repository at this point in the history
BUG: max-lr not used correctly
  • Loading branch information
GeorgWa authored Aug 21, 2024
2 parents 190de04 + fd8e675 commit dad6ed5
Showing 1 changed file with 5 additions and 5 deletions.
10 changes: 5 additions & 5 deletions alphadia/transferlearning/train.py
Original file line number Diff line number Diff line change
Expand Up @@ -68,7 +68,7 @@ def __init__(self, optimizer: torch.optim.Optimizer, **kwargs):
self.reduce_lr_on_plateau = torch.optim.lr_scheduler.ReduceLROnPlateau(
optimizer,
mode="min",
patience=settings["lr_patience"],
patience=3,
factor=0.5,
)
self.warmup_lr = LambdaLR(optimizer, self._warmup)
Expand Down Expand Up @@ -547,7 +547,7 @@ def finetune_ms2(
epoch=self.settings["epochs"],
batch_size=self.settings["batch_size"],
warmup_epoch=self.settings["warmup_epochs"],
lr=settings["max_lr"],
lr=self.settings["max_lr"],
)

self._test_ms2(
Expand Down Expand Up @@ -706,7 +706,7 @@ def finetune_rt(self, psm_df: pd.DataFrame) -> pd.DataFrame:
batch_size=self.settings["batch_size"],
epoch=self.settings["epochs"],
warmup_epoch=self.settings["warmup_epochs"],
lr=settings["max_lr"],
lr=self.settings["max_lr"],
)

self._test_rt(
Expand Down Expand Up @@ -888,7 +888,7 @@ def finetune_charge(self, psm_df: pd.DataFrame) -> pd.DataFrame:
batch_size=self.settings["batch_size"],
epoch=self.settings["epochs"],
warmup_epoch=self.settings["warmup_epochs"],
lr=settings["max_lr"],
lr=self.settings["max_lr"],
)

self._test_charge(
Expand Down Expand Up @@ -1050,7 +1050,7 @@ def finetune_ccs(self, psm_df: pd.DataFrame) -> pd.DataFrame:
batch_size=self.settings["batch_size"],
epoch=self.settings["epochs"],
warmup_epoch=self.settings["warmup_epochs"],
lr=settings["max_lr"],
lr=self.settings["max_lr"],
)

self._test_ccs(
Expand Down

0 comments on commit dad6ed5

Please sign in to comment.