sgd.py (forked from awni/semantic-rntn)
import numpy as np
import random


class SGD:
    """Minibatch stochastic gradient descent, with optional Adagrad scaling."""

    def __init__(self, model, alpha=1e-2, minibatch=30,
                 optimizer='sgd'):
        self.model = model
        assert self.model is not None, "Must define a function to optimize"

        self.it = 0
        self.alpha = alpha          # learning rate
        self.minibatch = minibatch  # minibatch size
        self.optimizer = optimizer

        if self.optimizer == 'sgd':
            print("Using sgd...")
        elif self.optimizer == 'adagrad':
            print("Using adagrad...")
            # Accumulated squared gradients (one array per parameter in the
            # model stack), seeded with epsilon to avoid division by zero.
            epsilon = 1e-8
            self.gradt = [epsilon + np.zeros(W.shape)
                          for W in self.model.stack]
        else:
            raise ValueError("Invalid optimizer")

        self.costt = []    # per-minibatch costs
        self.expcost = []  # exponentially weighted moving average of cost
    def run(self, trees):
        """
        Runs stochastic gradient descent with model as objective.
        """
        m = len(trees)

        # randomly shuffle data
        random.shuffle(trees)

        for i in range(0, m - self.minibatch + 1, self.minibatch):
            self.it += 1

            mb_data = trees[i:i + self.minibatch]
            cost, grad = self.model.cost_and_grad(mb_data)

            # compute exponentially weighted cost
            if np.isfinite(cost):
                if self.it > 1:
                    self.expcost.append(.01 * cost + .99 * self.expcost[-1])
                else:
                    self.expcost.append(cost)

            if self.optimizer == 'sgd':
                update = grad
                scale = -self.alpha

            elif self.optimizer == 'adagrad':
                # Adagrad: trace = trace + grad.^2, then
                # update = grad .* trace.^(-1/2)
                self.gradt[1:] = [gt + g ** 2
                                  for gt, g in zip(self.gradt[1:], grad[1:])]
                update = [g * (1. / np.sqrt(gt))
                          for gt, g in zip(self.gradt[1:], grad[1:])]

                # Handle the word-embedding dictionary separately: its
                # gradient is a dict of column updates, not a dense array.
                dL = grad[0]
                dLt = self.gradt[0]
                for j in dL.keys():
                    dLt[:, j] = dLt[:, j] + dL[j] ** 2
                    dL[j] = dL[j] * (1. / np.sqrt(dLt[:, j]))
                update = [dL] + update
                scale = -self.alpha

            # update params
            self.model.update_params(scale, update, log=False)

            self.costt.append(cost)
            # log every iteration (the % 1 check is always true)
            if self.it % 1 == 0:
                print("Iter %d : Cost=%.4f, ExpCost=%.4f." %
                      (self.it, cost, self.expcost[-1]))