-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathNN.cpp
59 lines (40 loc) · 1.5 KB
/
NN.cpp
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
#include "NN.hpp"
// Construct a fully-connected network from a layer-size list.
//   topology    — neuron count per layer; adjacent pairs define one NNLayer
//                 (input width +1 for the bias unit).
//   act_funcs1  — activation function name per layer transition; must supply
//                 at least topology.size()-1 entries.
//   learningRate— passed through to every NNLayer.
//   bDebug      — enables debug output (see debug_mode()).
NN::NN(std::vector<int> topology, std::vector<std::string> act_funcs1, float learningRate, bool bDebug){
	this->topology = topology;
	this->learningRate = learningRate;
	this->bDebug = bDebug;
	// BUGFIX: the original wrote `this->act_funcs = act_funcs;` — a self-assignment
	// of the member (a no-op) — before appending act_funcs1 element by element.
	// Copy the caller-supplied names directly instead.
	this->act_funcs = act_funcs1;
	// BUGFIX: `i < topology.size()-1` underflows when topology is empty
	// (unsigned wrap-around); `i+1 < size()` is safe for 0- and 1-layer inputs.
	for (std::vector<int>::size_type i=0; i+1<topology.size(); i++)
		// +1 on the input width accounts for the bias neuron.
		model.push_back(new NNLayer(topology[i]+1,topology[i+1],act_funcs[i],learningRate));
	/*
	for (unsigned int i=0; i<topology.size()-1; i++){
		if (i<topology.size()-2)
			model.push_back(new NNLayer(topology[i]+1,topology[i+1],"relu",learningRate));
		else
			model.push_back(new NNLayer(topology[i]+1,topology[i+1],"none",learningRate));
	}
	*/
}
NN::~NN(){}
void NN::debug_mode(bool bdbg){
this->bDebug = bdbg;
}
// Run one forward pass: feed `input` through every layer in order and copy
// the final layer's activation into `output`.
// BUGFIX: the original heap-allocated the seed vector with `new` and leaked
// it the moment `vals` was reassigned to a layer's output pointer. A stack
// local holds the seed instead; no allocation escapes this function.
void NN::forward(Eigen::RowVectorXf& input, Eigen::RowVectorXf& output){
	Eigen::RowVectorXf seed = input;
	Eigen::RowVectorXf* vals = &seed;
	// Each layer returns a pointer to its activation vector — presumably
	// owned/cached by the layer itself, so it is not freed here (TODO confirm
	// against NNLayer::forward).
	for (unsigned int i=0; i<model.size(); i++)
		vals = model[i]->forward(*vals);
	output = *vals;
}
// Backpropagate the error (actions - experimentals) through the layers in
// reverse order, letting each layer update its weights and return the delta
// for the layer below.
// BUGFIX: mirrors forward() — the original leaked a `new` RowVectorXf as soon
// as `delta` was reassigned to a layer's returned pointer; use a stack local.
void NN::backward(Eigen::RowVectorXf& actions, Eigen::RowVectorXf& experimentals){
	Eigen::RowVectorXf seed = actions - experimentals;
	Eigen::RowVectorXf* delta = &seed;
	// Pointers returned by NNLayer::backward are presumably layer-owned and
	// must not be freed here — TODO confirm against NNLayer::backward.
	for (int i=model.size()-1; i>=0; i--)
		delta = model[i]->backward(*delta);
}
// Tick every layer's internal time counter (e.g. for time-dependent
// optimizer state) by delegating to NNLayer::update_time.
void NN::update_time(){
	const unsigned int layerCount = model.size();
	for (unsigned int idx = 0; idx != layerCount; ++idx)
		model[idx]->update_time();
}