Layer.h
/**
 * @file Layer.h
 *
 * @brief Declares the structure for a layer
 */
#pragma once

#include <cmath>
#include <vector>
#include <armadillo>

using namespace std;
using namespace arma;
/**
 * @class Layer
 *
 * @brief A layer of a neural network.
 * Consists of neurons and weights (connections with other layers).
 *
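 * Example usage (a minimal sketch; it assumes the members are implemented
 * in Layer.cpp as their names suggest, and that the constructor takes the
 * previous layer's size followed by this layer's size):
 * @code
 *   Layer input;                          // holds the raw sample values
 *   Layer hidden(2, 3);                   // 2 inputs feeding 3 neurons
 *   input.initialiseInputs({0.5, -0.2});  // load one training sample
 *   hidden.initialiseWeights();           // random initial weights
 *   vector<double> out = hidden.feedForward(input);
 * @endcode
 *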
 * @author Thomas Fisher
 * @date 04/05/2017
 */
class Layer
{
public:
    Layer(unsigned inputSize, unsigned outputSize);
    Layer();
    ~Layer();

    void initialiseWeights(); // Initialise weights to random values

    // Activation functions. Note: sigmoid() actually applies tanh;
    // altSigmoid() is the logistic function 1 / (1 + e^-x).
    double sigmoid(double x) { return tanh(x); }
    double sigmoidDerivative(double x) { return 1.0 - x * x; } // Derivative of tanh, where x is the tanh output (1 - y^2)
    double altSigmoid(double x) { return 1.0 / (1.0 + exp(-x)); }

    vector<double> feedForward(Layer &prevLayer);
    double calculateError(double expected);
    void updateWeights(Layer &prevLayer);

    size_t size() { return m_outputs.size(); }
    double getWeight(unsigned row, unsigned col) { return m_weights.at(row, col); }
    void setWeight(unsigned row, unsigned col, double newWeight) { m_weights.at(row, col) = newWeight; }
    double getWeightChange(unsigned row, unsigned col) { return m_weightChanges.at(row, col); }
    void setWeightChange(unsigned row, unsigned col, double newWeightChange) { m_weightChanges.at(row, col) = newWeightChange; }
    double getGradient(unsigned i) { return m_gradients[i]; }
    unsigned getOutputSize() { return m_outputSize; }
    double getOutput(unsigned i) { return m_outputs[i]; }
    vector<double> getOutputs() { return m_outputs; }
    void initialiseInputs(vector<double> sample) { m_outputs = sample; }

    void calcOutputGradient(double target);
    void backPropagate(Layer &nextLayer);
    double sumDerivativeOfWeights(Layer &nextLayer);
    void setEta(double eta) { m_eta = eta; }

private:
    vector<double> m_outputs;    // Neuron values after activation
    vector<double> m_rawOutputs; // Neuron values before activation
    vector<double> m_gradients;
    unsigned m_inputSize, m_outputSize;
    mat m_weights;       // Connection weights from the previous layer
    mat m_weightChanges; // Previous weight deltas, kept for the momentum term
    double m_eta = 0.01;      // Learning rate
    const double ALPHA = 0.5; // Momentum: fraction of the last weight change carried over
};
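
// A sketch of one backpropagation step using this interface. The call order
// is an assumption inferred from the member names (Layer.cpp is not shown
// here), not a documented contract:
//
//   outputLayer.calcOutputGradient(target);  // gradient at the output neuron
//   hiddenLayer.backPropagate(outputLayer);  // hidden gradients via sumDerivativeOfWeights()
//   outputLayer.updateWeights(hiddenLayer);  // apply deltas using m_eta and ALPHA
//   hiddenLayer.updateWeights(inputLayer);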