ann_sigmoid.py (forked from lazyprogrammer/facial-expression-recognition)

from __future__ import print_function, division
from builtins import range
# Note: you may need to update your version of future
# sudo pip install -U future

import numpy as np
import matplotlib.pyplot as plt
from sklearn.utils import shuffle

from util import getBinaryData, sigmoid, sigmoid_cost, error_rate, relu


class ANN(object):
    def __init__(self, M):
        self.M = M

    def fit(self, X, Y, learning_rate=5*10e-7, reg=1.0, epochs=10000, show_fig=False):
        X, Y = shuffle(X, Y)
        Xvalid, Yvalid = X[-1000:], Y[-1000:]
        X, Y = X[:-1000], Y[:-1000]

        N, D = X.shape
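        # Initialize weights with scale ~ 1/sqrt(fan_in) so the initial
        # pre-activations stay in a reasonable range (Xavier-style scaling).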
        self.W1 = np.random.randn(D, self.M) / np.sqrt(D)
        self.b1 = np.zeros(self.M)
        self.W2 = np.random.randn(self.M) / np.sqrt(self.M)
        self.b2 = 0

        costs = []
        best_validation_error = 1
        for i in range(epochs):
            # forward propagation and cost calculation
            pY, Z = self.forward(X)

            # gradient descent step
            pY_Y = pY - Y
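            # For a sigmoid output with cross-entropy cost, the gradient of
            # the cost w.r.t. the output pre-activation is exactly pY - Y;
            # the updates below combine it with an L2 regularization term.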
            self.W2 -= learning_rate*(Z.T.dot(pY_Y) + reg*self.W2)
            self.b2 -= learning_rate*((pY_Y).sum() + reg*self.b2)

            # print("(pY_Y).dot(self.W2.T) shape:", (pY_Y).dot(self.W2.T).shape)
            # print("Z shape:", Z.shape)

            # dZ = np.outer(pY_Y, self.W2) * (Z > 0)
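            # Backprop through the tanh hidden layer: d/dz tanh(z) = 1 - tanh(z)^2,
            # and Z already holds tanh of the hidden pre-activation, so the
            # elementwise factor is (1 - Z*Z). The commented line above is the
            # corresponding factor for a relu hidden layer.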
            dZ = np.outer(pY_Y, self.W2) * (1 - Z*Z)
            self.W1 -= learning_rate*(X.T.dot(dZ) + reg*self.W1)
            self.b1 -= learning_rate*(np.sum(dZ, axis=0) + reg*self.b1)

            if i % 20 == 0:
                pYvalid, _ = self.forward(Xvalid)
                c = sigmoid_cost(Yvalid, pYvalid)
                costs.append(c)
                e = error_rate(Yvalid, np.round(pYvalid))
                print("i:", i, "cost:", c, "error:", e)
                if e < best_validation_error:
                    best_validation_error = e
        print("best_validation_error:", best_validation_error)

        if show_fig:
            plt.plot(costs)
            plt.show()

    def forward(self, X):
        # Z = relu(X.dot(self.W1) + self.b1)
        Z = np.tanh(X.dot(self.W1) + self.b1)
        return sigmoid(Z.dot(self.W2) + self.b2), Z

    def predict(self, X):
        # forward() returns (pY, Z); only the output probabilities are needed here
        pY, _ = self.forward(X)
        return np.round(pY)

    def score(self, X, Y):
        prediction = self.predict(X)
        return 1 - error_rate(Y, prediction)


def main():
    X, Y = getBinaryData()

    X0 = X[Y==0, :]
    X1 = X[Y==1, :]
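    # The binary subset is heavily imbalanced (class 1 is much rarer than
    # class 0), so the minority class is repeated 9x below to roughly
    # balance the two classes before training.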
    X1 = np.repeat(X1, 9, axis=0)
    X = np.vstack([X0, X1])
    Y = np.array([0]*len(X0) + [1]*len(X1))

    model = ANN(100)
    model.fit(X, Y, show_fig=True)


if __name__ == '__main__':
    main()
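
# A minimal smoke-test sketch (not part of the original script): it assumes
# only numpy and avoids the fer2013 csv that getBinaryData() expects. The
# input width 48*48 mirrors the flattened FER images; Xfake/Yfake are
# hypothetical names.
#
#   Xfake = np.random.randn(2000, 48*48)
#   Yfake = np.random.randint(0, 2, 2000)
#   ANN(10).fit(Xfake, Yfake, epochs=200)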