/*
 * layer.cpp
 *
 * Version information
 * Author: Conall Hanley
 * Date: 22/01/2020
 * Description: Implementation file for the layer class; defines the member
 *              functions declared in layer.h.
 *
 * Copyright notice -
 */
#include "layer.h"
//double layer::sensetivity;
layer::layer()
{
}
layer::layer(const double* euler)
{
	euler_ = euler;
}
layer::layer(std::vector<double> inputs, const double* euler)
{
	euler_ = euler;
	this->init_vector();
	for (int i = 0; i < inputs.size(); i++)
	{
		neuron newinput(i, 0.0, euler);
		newinput.set_activation(inputs.at(i));
		//neurons_.reserve(sizeof(newinput) + (neurons_.size()* sizeof(newinput)));
		//this->addneuron(newinput);
		neurons_->push_back(newinput);
		//neurons_->at(i).init_activation();
	}
}
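/*
 * A minimal usage sketch for the constructor above (the euler pointer and the
 * three input values are illustrative, not from the original code):
 *
 *   const double e = 2.718281828459045;
 *   std::vector<double> inputs = {0.0, 0.5, 1.0};
 *   layer inputlayer(inputs, &e);   //one neuron per input value
 *   //inputlayer.getactivations() now returns {0.0, 0.5, 1.0}
 */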
layer::~layer()
{
	//delete neurons_;
}
std::vector<neuron>* layer::getneurons() const
{
	return neurons_;
}
void layer::addneuron(neuron n)
{
	if (neurons_ != nullptr)
	{
		this->neurons_->push_back(n);
		//this->neurons_->at(neurons_->size() - 1).init_activation();
	}
	else {
		std::cout << "Vector must be initialized!" << std::endl;
	}
}
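/*
 * Sketch of building a layer neuron by neuron, assuming neurons_ starts out
 * as nullptr (as the checks in addneuron() and init_vector() suggest); the
 * values here are illustrative:
 *
 *   layer hidden(&e);
 *   hidden.init_vector();
 *   for (int i = 0; i < 4; i++) {
 *       hidden.addneuron(neuron(i, 0.0, &e));
 *   }
 */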
void layer::removeneuron(int number)
{
	for (int i = 0; i < neurons_->size(); i++) {
		if (neurons_->at(i).getnumber() == number) {
			//neurons_->at(i).clear_activation();
			neurons_->at(i).clear_weights();
			neurons_->erase(neurons_->begin() + i);
			i--; //step back so the element shifted into slot i is not skipped
		}
	}
}
/*
neuron layer::getneuron(int number)
{
	if (neurons_->at(number).getnumber() == number) {
		return neurons_->at(number);
	}
	return neurons_->at(number);
} */
void layer::setnuerons(std::vector<neuron> inputs)
{
	//Copy up to the smaller of the two sizes so we never read past the end of inputs
	for (int i = 0; i < this->neurons_->size() && i < inputs.size(); i++){
		this->neurons_->at(i) = inputs[i];
	}
}
void layer::conntectneurons(layer l)
{
	std::vector<neuron> lneurons = *l.getneurons();
	std::vector<double> weights;
	for (int i = 0; i < lneurons.size(); i++)
	{
		// weights.push_back(lneurons[i].getweight());
	}
	for (int i = 0; i < neurons_->size(); i++)
	{
		//neurons_[i].calculateoutput(&weights);
	}
}
double layer::getsensitivity() const
{
	return sensetivity;
}
//Takes the previous generation's layer as input
void layer::calculatesensitivity(layer& lastgen)
{
	double Y = 1;
	double Yprime = 1;
	double out;
	//First product: intended to accumulate changeweight / changebias into Y
	for (int i = 0; i < this->neurons_->size(); i++) {
		//double changeweight = neurons_[i].getweight() - lastgen.getneuron(i).getweight();
		double changebias = this->neurons_->at(i).getbias() - lastgen.getneurons()->at(i).getbias();
		// Y *= changeweight / changebias;
	}
	//Second product: intended to accumulate the negated term into Yprime
	for (int i = 0; i < this->neurons_->size(); i++) {
		// double changeweight = neurons_[i].getweight() - lastgen.getneuron(i).getweight();
		double changebias = neurons_->at(i).getbias() - lastgen.getneurons()->at(i).getbias();
		// Yprime *= (changeweight / changebias * changebias) * -1;
	}
	//Sensitivity is the absolute difference of the two products
	out = Y - Yprime;
	if (out < 0){
		out *= -1;
	}
	sensetivity = out;
}
double layer::getcost() const
{
	return cost_;
}
void layer::caluclatecost(layer correct)
{
	int n = this->neurons_->size();
	double avg = 0.0;
	//The accumulation below is commented out, so cost_ currently evaluates to 1.0
	for (int i = 0; i < n; i++) {
		// avg += (neurons_[i].getweight() - correct.getneuron(i).getweight()) * (neurons_[i].getweight() - correct.getneuron(i).getweight());
	}
	avg = avg / (double) n;
	cost_ = 1.0 - avg;
}
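/*
 * The intended cost, per the commented-out accumulation above, is one minus
 * the mean squared difference against the "correct" layer. A worked example
 * with illustrative weights {0.2, 0.4} and targets {0.0, 0.4}:
 *
 *   avg  = ((0.2 - 0.0)^2 + (0.4 - 0.4)^2) / 2 = 0.02
 *   cost = 1.0 - 0.02 = 0.98
 */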
void layer::nudge()
{
	int n = this->neurons_->size();
	//Only enter loop if there is a next layer
	/*
	for (int i = 0; i < n; i++){
		//I think the answer is to add the learning rate if the neuron is higher and subtract if lower
		if (nextLayer->getneuron(i).getweight() > this->getneuron(i).getweight()){
			this->getneuron(i).setbias(this->getneuron(i).getbias() + rate); //If the connected neuron is firing, this one will be more likely to fire
		}
		else {
			this->getneuron(i).setbias(this->getneuron(i).getbias() - rate); //subtract if not firing
		}
	} */
}
std::vector<double> layer::getactivations()
{
	std::vector<double> out;
	for (int i = 0; i < this->neurons_->size(); i++) {
		out.push_back(this->neurons_->at(i).get_activation());
	}
	return out;
}
long layer::getsize() {
	return this->neurons_->size();
}
//Gets the weights of every neuron, flattened into a single vector
std::vector<double> layer::getweights()
{
	std::vector<double> out;
	for (int i = 0; i < this->neurons_->size(); i++) {
		//Each neuron holds one weight per neuron in the next layer, a count this
		//class does not store; sizeof on the dereferenced pointer only yields
		//sizeof(double), so this bound does not reflect the real weight count
		for (int e = 0; e < sizeof(*neurons_->at(i).getweights()); e++) {
			out.push_back(neurons_->at(i).getweights()[e]);
		}
	}
	return out;
}
void layer::feedforward(layer prev) {
	//std::vector<double> weights = prev.getweights();
	std::vector<double> activations = prev.getactivations();
	for (int i = 0; i < neurons_->size(); i++) {
		//Gather the weights feeding neuron i: weight i of previous neuron e is
		//the edge from e into this neuron (matching the indexing used by
		//setweights/copyweights, where each neuron stores nextlayersize weights)
		std::vector<double> weights(prev.getneurons()->size());
		for (int e = 0; e < prev.getneurons()->size(); e++)
		{
			weights[e] = prev.getneurons()->at(e).getweights()[i];
		}
		neurons_->at(i).calculateoutput_sigmoid(activations, weights.data(), prev.getneurons()->size());
	}
}
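/*
 * Sketch of a full forward pass, assuming the hidden and output layers were
 * built beforehand and their incoming weights set (e.g. via setweights() or
 * copyweights()); the layer names are illustrative:
 *
 *   layer inputlayer(samples, &e);   //activations come straight from the data
 *   hidden.feedforward(inputlayer);  //sigmoid of the weighted sum, per calculateoutput_sigmoid
 *   output.feedforward(hidden);
 *   std::vector<double> prediction = output.getactivations();
 */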
void layer::setsensitivity(double sense)
{
	sensetivity = sense;
}
void layer::init_vector()
{
	if (neurons_ == nullptr)
	{
		neurons_ = new std::vector<neuron>;
	}
	else {
		std::cout << "Vector already initialized!" << std::endl;
	}
}
void layer::delete_vector()
{
	delete neurons_;
	neurons_ = nullptr;
}
void layer::copyweights(layer copyfrom, int nextlayersize)
{
	//Also copies biases
	if (copyfrom.getneurons()->size() != this->neurons_->size())
	{
		printf("Layers must be the same size to copy weights!\n");
		return;
	}
	for (int i = 0; i < this->neurons_->size(); i++){
		this->neurons_->at(i).setweights(copyfrom.getneurons()->at(i).getweights(), nextlayersize);
		this->neurons_->at(i).setbias(copyfrom.getneurons()->at(i).getbias());
	}
}
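/*
 * Sketch of cloning one generation's parameters into the next, assuming both
 * layers hold the same number of neurons and nextlayersize matches the layer
 * that follows them (names illustrative):
 *
 *   child.copyweights(parent, (int)nextlayer.getsize());
 *   //child's neurons now carry parent's weight values and biases
 */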