Network.py
import Layer
import OutputLayer
import numpy as np


class Network:
    def __init__(self, inputSize, outputSize, hiddenLayerSizes, activationFunction="Sigmoid"):
        # Build the chain of fully connected layers: input -> hidden layers -> output.
        self.Layers = []
        layerSizes = [inputSize]
        layerSizes.extend(hiddenLayerSizes)
        layerSizes.append(outputSize)
        for i in range(len(layerSizes) - 1):
            newLayer = Layer.Layer(layerSizes[i], layerSizes[i + 1], activationFunction)
            newLayer.RandomizeWeights()
            self.Layers.append(newLayer)
        # The first layer passes its inputs through without a nonlinearity.
        self.Layers[0].Mode = "Linear"
        self.OutputLayer = OutputLayer.OutputLayer(outputSize)

    def Feedforward(self, inputs):
        # Propagate the input vector through every layer in order, then
        # activate the output layer on the final layer's result.
        nextLayerInput = inputs.copy()
        for layer in self.Layers:
            nextLayerInput = layer.Feedforward(nextLayerInput)
        self.OutputLayer.Activate(nextLayerInput)
        return self.OutputLayer.GetOutput()

    def GetError(self, expected):
        # Error of the most recent forward pass against the expected output.
        return self.OutputLayer.GetError(expected)

    def Backprop(self, expected, learningRate):
        # Walk the layers back to front: update each layer's weights with the
        # outer product of its deltas and its stored input activations, then
        # propagate the deltas to the previous layer.
        nextLayerDeltas = self.OutputLayer.GetDeltas(expected)
        for layer in reversed(self.Layers):
            gradient = np.dot(nextLayerDeltas.reshape(nextLayerDeltas.size, 1),
                              layer.Activations.reshape(1, layer.Activations.size))
            layer.Weights = np.add(layer.Weights, np.multiply(gradient, learningRate))
            nextLayerDeltas = layer.MakeDeltas(nextLayerDeltas)
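

# A minimal usage sketch, not part of the original file. It assumes the Layer
# and OutputLayer modules expose the interface used above (RandomizeWeights,
# Feedforward, Activations, MakeDeltas, Activate, GetOutput, GetError,
# GetDeltas) and that Feedforward accepts a 1-D numpy array. It trains a small
# network on XOR-style data purely to illustrate the forward/backward calls.
if __name__ == "__main__":
    net = Network(inputSize=2, outputSize=1, hiddenLayerSizes=[4])
    samples = [
        (np.array([0.0, 0.0]), np.array([0.0])),
        (np.array([0.0, 1.0]), np.array([1.0])),
        (np.array([1.0, 0.0]), np.array([1.0])),
        (np.array([1.0, 1.0]), np.array([0.0])),
    ]
    for epoch in range(1000):
        for x, target in samples:
            net.Feedforward(x)          # forward pass stores activations in each layer
            net.Backprop(target, 0.1)   # weight update toward the target
    for x, target in samples:
        print(x, net.Feedforward(x), "expected", target)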