Commit
CosAnnealLR
ChanLumerico committed Aug 8, 2024
1 parent 590ccb1 commit 8dc5595
Showing 2 changed files with 37 additions and 7 deletions.
4 changes: 2 additions & 2 deletions luma/__import__.py
@@ -115,7 +115,7 @@
from luma.neural.loss import CrossEntropy, BinaryCrossEntropy, MSELoss
from luma.neural.loss import HingeLoss, HuberLoss, KLDivergenceLoss, NLLLoss
from luma.neural.init import KaimingInit, XavierInit
from luma.neural.scheduler import StepLR, ExponentialLR
from luma.neural.scheduler import StepLR, ExponentialLR, CosineAnnealingLR
from luma.neural.block import (
ConvBlock1D,
ConvBlock2D,
@@ -314,7 +314,7 @@

KaimingInit, XavierInit

StepLR, ExponentialLR,
StepLR, ExponentialLR, CosineAnnealingLR

SimpleMLP, SimpleCNN,
LeNet_1, LeNet_4, LeNet_5,
40 changes: 35 additions & 5 deletions luma/neural/scheduler.py
@@ -1,9 +1,16 @@
import numpy as np
import math

from luma.neural.base import Scheduler


__all__ = ("StepLR", "ExponentialLR")
__all__ = (
"StepLR",
"ExponentialLR",
"CosineAnnealingLR",
)

# TODO: Please add docstrings in future development.


class StepLR(Scheduler):
@@ -17,7 +24,6 @@ def __init__(
self.init_lr = init_lr
self.step_size = step_size
self.gamma = gamma

self.type_ = "epoch"

@property
@@ -39,14 +45,38 @@ def __init__(
super().__init__(init_lr)
self.init_lr = init_lr
self.gamma = gamma
self.type_ = "epoch"

@property
def new_learning_rate(self) -> float:
epoch_index = self.iter // self.n_iter
new_lr = self.init_lr * (self.gamma**epoch_index)

self.lr_trace.append(new_lr)
return new_lr


class CosineAnnealingLR(Scheduler):
def __init__(
self,
init_lr: float,
T_max: int,
eta_min: float = 0,
) -> None:
super().__init__(init_lr)
self.init_lr = init_lr
self.T_max = T_max
self.eta_min = eta_min
self.type_ = "epoch"

@property
def new_learning_rate(self) -> float:
epoch_index = self.iter // self.n_iter

new_lr = (
self.eta_min
+ (self.init_lr - self.eta_min)
* (1 + math.cos(math.pi * epoch_index / self.T_max))
/ 2
)
self.lr_trace.append(new_lr)
return new_lr
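
For reference, the new CosineAnnealingLR class computes the standard cosine-annealing rule, lr(t) = eta_min + (init_lr - eta_min) * (1 + cos(pi * t / T_max)) / 2, where t is the epoch index. Below is a minimal standalone sketch of that rule; it does not use luma's Scheduler API, and the function name and example values are illustrative only.

import math

def cosine_annealed_lr(epoch: int, init_lr: float, T_max: int, eta_min: float = 0.0) -> float:
    # Standard cosine-annealing rule: decay from init_lr toward eta_min over T_max epochs.
    return eta_min + (init_lr - eta_min) * (1 + math.cos(math.pi * epoch / T_max)) / 2

# Example: anneal from 0.1 down to 0.001 over 10 epochs.
for epoch in range(11):
    print(epoch, round(cosine_annealed_lr(epoch, init_lr=0.1, T_max=10, eta_min=0.001), 5))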
