-
Notifications
You must be signed in to change notification settings - Fork 6
/
learning_rate.py
74 lines (60 loc) · 2.19 KB
/
learning_rate.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
import numpy as np
import theano
class AnnealedLearningRate(object):
    """Callback that anneals the learning rate by 1/t at each freq
    (batch or epoch).

    Parameters:
    anneal_start: int
        the epoch when to start annealing.
    """
    def __init__(self, anneal_start, freq='epoch'):
        self.freq = freq
        self._anneal_start = anneal_start
        self._count = 0.
        self._initialized = False
    def __call__(self, learning_rate):
        """Anneal the given shared learning rate variable in place.
        """
        if not self._initialized:
            # Remember the starting value the first time we are invoked.
            self._initialized = True
            self._base = learning_rate.get_value()
        self._count += 1
        new_value = np.cast[theano.config.floatX](
            self.get_current_learning_rate())
        learning_rate.set_value(new_value)
    def get_current_learning_rate(self):
        """Return the base lr scaled by min(1, anneal_start / t).

        Before ``anneal_start`` steps the scale clamps to 1 (no change);
        afterwards the lr decays proportionally to 1/t.
        """
        scale = self._anneal_start / self._count
        if scale > 1:
            scale = 1
        return self._base * scale
class ExponentialDecayLearningRate(object):
    """
    This anneals the learning rate by dividing it by decay_factor after
    each update (freq='batch').

        lr = base_lr * decay_factor**(-t)

    Parameters:
    decay_factor: float
        The decay factor.
    min_lr: float
        The lr will be fixed to min_lr when it's reached.
    """
    def __init__(self, decay_factor, min_lr):
        self._count = 0
        self._min_reached = False
        self.min_lr = min_lr
        self.decay_factor = decay_factor
        self.freq = 'batch'
    def _next_lr(self):
        """Compute the lr for the current step count, clamped at min_lr.

        Once min_lr is reached, all subsequent calls return min_lr.
        """
        if self._min_reached:
            return self.min_lr
        new_lr = self._base_lr * (self.decay_factor ** (-self._count))
        if new_lr <= self.min_lr:
            self._min_reached = True
            # BUGFIX: was `new_lr = self._min_reached`, which assigned the
            # boolean True as the learning rate instead of min_lr.
            return self.min_lr
        return new_lr
    def __call__(self, learning_rate):
        """Update the learning rate according to the exponential decay
        schedule.

        Parameters:
        learning_rate: theano shared variable holding the current lr;
            updated in place via set_value.
        """
        if self._count == 0:
            # Capture the base lr on the first call.
            # BUGFIX: was `get_vale()` (typo), which raised AttributeError.
            self._base_lr = learning_rate.get_value()
        self._count += 1
        learning_rate.set_value(np.cast[theano.config.floatX](self._next_lr()))