lDense.h
#ifndef LDENSE_H
#define LDENSE_H

#include "Layer.h"
#include <cmath>

// Fully connected (dense) layer with a tanh activation.
class lDense : public Layer {
private:
    // Weights and biases
    Tensor weights;
    Tensor bias;
    // Gradients of the cost w.r.t. the input, weights and biases
    Tensor dCdX;
    Tensor dCdW;
    Tensor dCdB;
public:
    // Constructor
    lDense(int in_size, int out_size) {
        // Set dimensions
        this->in_dim = 1;
        this->in_rows = in_size;
        this->in_cols = 1;
        this->out_dim = 1;
        out_rows = out_size;
        out_cols = 1;
        // Redimension matrices
        in.resize(in_dim, in_rows, in_cols);
        out.resize(out_dim, out_rows, out_cols);
        weights.resize(in_dim, out_rows, in_rows);
        bias.resize(in_dim, out_rows, out_cols);
        dCdX.resize(in_dim, in_rows, in_cols);
        dCdW.resize(in_dim, out_rows, in_rows);
        dCdB.resize(in_dim, out_rows, out_cols);
        // Initialise weights and biases
        weights.randn(0.0, 0.1);
        bias.randn(0.0, 1.0);
    }
    // Properties
    char getType() { return 'd'; }
    Tensor getWeights() { return weights; }
    // Functions
    Tensor feedforward( Tensor in ) {
        // Cache the input for the backward pass
        this->in = in.copy();
        for (int i = 0; i < out_rows; i++) {
            out(0, i, 0) = bias(0, i, 0);
            for (int j = 0; j < in_rows; j++) {
                out(0, i, 0) += weights(0, i, j) * in(0, j, 0);
            }
            // Apply non-linearity
            out(0, i, 0) = tanh(out(0, i, 0));
        }
        return out;
    }
    Tensor feedback( Tensor delta ) {
        dCdX.set(0);
        dCdW.set(0);
        dCdB.set(0);
        for (int i = 0; i < out_rows; i++) {
            // tanh'(z) = 1 - tanh(z)^2, and out already holds tanh(z)
            dCdB(0, i, 0) = delta(0, i, 0) * (1 - pow(out(0, i, 0), 2));
            for (int j = 0; j < in_rows; j++) {
                dCdW(0, i, j) = dCdB(0, i, 0) * in(0, j, 0);
                dCdX(0, j, 0) += dCdB(0, i, 0) * weights(0, i, j);
            }
        }
        // Return the gradient w.r.t. the input for the previous layer
        return dCdX;
    }
    void updateweights( float rate ) {
        // Plain gradient-descent step
        for (int i = 0; i < out_rows; i++) {
            bias(0, i, 0) -= rate * dCdB(0, i, 0);
            for (int j = 0; j < in_rows; j++) {
                weights(0, i, j) -= rate * dCdW(0, i, j);
            }
        }
        return;
    }
};

#endif
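
// A minimal usage sketch (not part of the original header), kept as a comment
// so the include guard stays intact. It assumes the Layer base class and the
// Tensor API (resize, randn, copy, set and the (dim, row, col) element
// accessor) behave exactly as they are used above; all values below are
// illustrative only.
//
//   #include "lDense.h"
//
//   int main() {
//       lDense layer(3, 2);                  // 3 inputs -> 2 outputs
//
//       Tensor x;
//       x.resize(1, 3, 1);                   // column-vector input
//       x(0, 0, 0) = 0.5f;
//       x(0, 1, 0) = -1.0f;
//       x(0, 2, 0) = 0.25f;
//
//       Tensor y = layer.feedforward(x);     // forward pass, tanh outputs
//
//       Tensor delta;                        // dC/dy, here for C = 0.5 * ||y||^2
//       delta.resize(1, 2, 1);
//       delta(0, 0, 0) = y(0, 0, 0);
//       delta(0, 1, 0) = y(0, 1, 0);
//
//       Tensor dx = layer.feedback(delta);   // backward pass, gradient w.r.t. x
//       layer.updateweights(0.01f);          // gradient-descent update
//       return 0;
//   }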