'''
From Assembly.AI: a toy linear SVM trained with (sub)gradient descent.
'''
import numpy as np


class SVM:
    def __init__(self, learning_rate=0.001, lambda_param=0.01, n_iters=1000) -> None:
        self.lr = learning_rate
        self.lambda_param = lambda_param
        self.n_iters = n_iters
        self.w = None
        self.b = None

    def fit(self, X, y):
        n_samples, n_features = X.shape
        # Map labels to -1/+1 (e.g. 0/1 inputs become -1/+1)
        y_ = np.where(y <= 0, -1, 1)
        self.w = np.zeros(n_features)
        self.b = 0
        # Optimization: gradient descent to minimize the objective J,
        # which combines the hinge loss with an L2 regularizer:
        #
        #   J = (1/n) * sum_i max(0, 1 - y_i * (w . x_i - b)) + lambda * ||w||^2
        #
        # Minimizing the hinge loss penalizes margin-violating points, while
        # the regularizer keeps ||w|| small, i.e. maximizes the margin.
        for _ in range(self.n_iters):
            for idx, x_i in enumerate(X):
                condition = y_[idx] * (np.dot(x_i, self.w) - self.b) >= 1
                # dJ/dw and dJ/db lead to the following update rules
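                # Per-sample subgradients of J (read off the objective above;
                # the margin condition decides which branch is active):
                #   margin satisfied:  dJ/dw = 2 * lambda * w,              dJ/db = 0
                #   margin violated:   dJ/dw = 2 * lambda * w - y_i * x_i,  dJ/db = y_i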
                if condition:
                    self.w -= self.lr * (2 * self.lambda_param * self.w)
                else:
                    self.w -= self.lr * (
                        2 * self.lambda_param * self.w - y_[idx] * x_i
                    )
                    self.b -= self.lr * y_[idx]

    def predict(self, X):
        approx = np.dot(X, self.w) - self.b
        # Apply the linear model w . x - b to the inputs; the sign of the
        # result tells which side of the separating hyperplane each sample
        # falls on, and hence its predicted class (-1 or +1).
        return np.sign(approx)
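

# A minimal usage sketch (not part of the original file): fit the toy SVM on a
# small synthetic, linearly separable dataset, check the training accuracy, and
# evaluate the objective J from the comment in fit(). The data and parameter
# values below are illustrative assumptions, not from the original source.
if __name__ == "__main__":
    rng = np.random.default_rng(0)
    # Two Gaussian blobs: class -1 around (-2, -2), class +1 around (+2, +2)
    X_neg = rng.normal(loc=-2.0, scale=0.8, size=(50, 2))
    X_pos = rng.normal(loc=2.0, scale=0.8, size=(50, 2))
    X = np.vstack([X_neg, X_pos])
    y = np.array([-1] * 50 + [1] * 50)

    clf = SVM(learning_rate=0.001, lambda_param=0.01, n_iters=1000)
    clf.fit(X, y)
    preds = clf.predict(X)
    print("training accuracy:", np.mean(preds == y))

    # Objective J = mean hinge loss + lambda * ||w||^2 (see fit())
    margins = y * (X @ clf.w - clf.b)
    J = np.maximum(0, 1 - margins).mean() + clf.lambda_param * np.dot(clf.w, clf.w)
    print("objective J:", J)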