Skip to content

Commit

Permalink
mark 1
Browse files Browse the repository at this point in the history
  • Loading branch information
Rabrg committed Nov 5, 2018
1 parent 10c65d1 commit 1ca74dc
Show file tree
Hide file tree
Showing 9 changed files with 113 additions and 19 deletions.
7 changes: 4 additions & 3 deletions Activation.cpp
Original file line number Diff line number Diff line change
@@ -1,9 +1,10 @@
#include "Activation.h"
#include <cmath>

/// Rectified linear unit: returns i for positive inputs, 0 otherwise.
/// Used as the hidden-layer activation; applied element-wise by
/// NeuralLayer::forward. (Original stub returned 0 for every input,
/// which would zero out the whole network.)
double Activation::relu(double i) {
    return i > 0 ? i : 0;
}

/// Un-normalized softmax term: exp(i) for a single logit.
/// NeuralLayer::forward detects this activation by function-pointer
/// comparison and divides each output by the sum of all outputs, so the
/// per-element exponential is all that is computed here.
/// NOTE(review): no max-subtraction is done, so large logits can
/// overflow exp() — acceptable for this toy network's input range.
double Activation::softmax(double i) {
    return std::exp(i);
}
4 changes: 2 additions & 2 deletions Activation.h
Original file line number Diff line number Diff line change
Expand Up @@ -7,9 +7,9 @@
class Activation {

public:
static double relu(Neuron neuron);
static double relu(double i);

static double softmax(Neuron neuron);
static double softmax(double u);

private:
Activation();
Expand Down
39 changes: 35 additions & 4 deletions NeuralLayer.cpp
Original file line number Diff line number Diff line change
@@ -1,7 +1,38 @@
#include <iostream>
#include "NeuralLayer.h"
#include "Activation.h"
#include <iostream>

/// Builds a fully-connected layer of `layer_size` neurons, each holding
/// `input_size` weights, so that forward() can iterate neurons[0..layer_size)
/// and dot each one against an input vector of length input_size.
///
/// Fixes two defects in the original:
///  - it allocated `input_size` neurons while forward() iterates
///    `layer_size` of them (out-of-bounds read whenever
///    layer_size > input_size);
///  - each neuron was built with `layer_size` weights instead of
///    `input_size`, so the dot product in Neuron::forward walked past
///    the input array.
NeuralLayer::NeuralLayer(int input_size, int layer_size, double (*activation)(double))
        : input_size(input_size),
          layer_size(layer_size),
          activation(activation) {
    // One Neuron per output unit; each owns a weight vector sized to the input.
    neurons = new Neuron *[layer_size];
    for (int i = 0; i < layer_size; i++) {
        neurons[i] = new Neuron(input_size);
    }
}

// Runs one forward pass of this layer: for each neuron, a weighted sum of
// `input` (via Neuron::forward) plus the neuron's bias, passed through the
// layer's activation function.
//
// `input` must have at least `input_size` elements; the return value is a
// heap-allocated array of `layer_size` doubles that the CALLER must delete[].
//
// NOTE(review): requires neurons[0..layer_size) to exist, each with
// input_size weights — the constructor as committed allocates input_size
// neurons with layer_size weights each, so this indexing mismatches it;
// confirm the intended weight-matrix layout.
double *NeuralLayer::forward(double *input) {
    std::cout << "starting forward neural layer input: " << input_size << " layer_size: " << layer_size << std::endl;
    auto output = new double[layer_size];

    for (int i = 0; i < layer_size; i++) {
        std::cout << "neuron: " << i << " started" << std::endl;

        Neuron *neuron = neurons[i];
        // Weighted sum of the inputs, then the per-neuron bias.
        double value = neuron->forward(input);
        value += neuron->bias;

        output[i] = activation(value);
        std::cout << "neuron: " << i << " completed" << std::endl;
    }

    // Softmax is computed in two stages: activation() returned exp(logit)
    // per element above; normalize here so the outputs sum to 1.
    // Relies on function-pointer equality to detect the softmax activation.
    if (activation == Activation::softmax) {
        double sum = 0;
        for (int i = 0; i < layer_size; i++)
            sum += output[i];
        for (int i = 0; i < layer_size; i++)
            output[i] /= sum;
    }
    return output;
}
6 changes: 4 additions & 2 deletions NeuralLayer.h
Original file line number Diff line number Diff line change
Expand Up @@ -10,9 +10,11 @@ class NeuralLayer {
int layer_size;
Neuron **neurons;

double (*activation)(Neuron);
double (*activation)(double);

NeuralLayer(int input_size, int layer_size, double (*activation)(Neuron));
NeuralLayer(int input_size, int layer_size, double (*activation)(double));

double* forward(double* input);
};


Expand Down
50 changes: 47 additions & 3 deletions NeuralNetwork.cpp
Original file line number Diff line number Diff line change
@@ -1,15 +1,59 @@
#include "NeuralNetwork.h"

#include <fstream>
#include <iostream>
#include <vector>

void NeuralNetwork::addNeuralLayer(int layer_size, double (*activation)(Neuron)) {
void NeuralNetwork::addNeuralLayer(int layer_size, double (*activation)(double)) {
layers.push_back(*new NeuralLayer(layers.back().layer_size, layer_size, activation));
}

void NeuralNetwork::addNeuralLayer(int input_size, int layer_size, double (*activation)(Neuron)) {
void NeuralNetwork::addNeuralLayer(int input_size, int layer_size, double (*activation)(double)) {
layers.push_back(*new NeuralLayer(input_size, layer_size, activation));
}

/// Feeds `input` forward through every layer in order and returns the final
/// layer's output: a heap-allocated array the CALLER must delete[].
/// `input` itself is never freed (caller-owned); with zero layers the input
/// pointer is returned unchanged, as before.
double *NeuralNetwork::evaluate(double *input) {
    double *value = input;
    for (auto &layer : layers) {
        double *next = layer.forward(value);
        // Each forward() heap-allocates its output; free every intermediate
        // buffer (the original leaked one array per hidden layer), but never
        // the caller-owned input.
        if (value != input)
            delete[] value;
        value = next;
    }
    return value;
}

/// Loads weights and biases from a whitespace-separated text file.
/// Format (inferred from the read order — confirm against the exporter
/// that wrote network.txt): layer_count, then per layer:
/// input_size hidden_size, an input_size x hidden_size weight matrix
/// (row = input index), then hidden_size biases.
void NeuralNetwork::loadParameters(const std::string path) {
    std::ifstream in(path);

    int layer_count;
    in >> layer_count;

    for (int layer_index = 0; layer_index < layer_count; layer_index++) {
        int input_size, hidden_size;
        in >> input_size >> hidden_size;

        // std::vector replaces the original raw new[] buffers, which were
        // never freed (one weight matrix + one bias array leaked per layer).
        std::vector<std::vector<double>> layer_weights(
                input_size, std::vector<double>(hidden_size));
        for (int i = 0; i < input_size; i++) {
            for (int j = 0; j < hidden_size; j++) {
                in >> layer_weights[i][j];
            }
        }

        std::vector<double> layer_biases(hidden_size);
        for (int j = 0; j < hidden_size; j++) {
            in >> layer_biases[j];
        }

        // Neuron j of the layer owns column j of the weight matrix plus
        // bias j — matching NeuralLayer::forward, which iterates one neuron
        // per OUTPUT unit. The original looped neurons over input_size and
        // re-assigned each neuron's single bias hidden_size times, leaving
        // every neuron with the LAST bias value.
        for (int j = 0; j < hidden_size; j++) {
            auto neuron = layers[layer_index].neurons[j];
            for (int i = 0; i < input_size; i++) {
                neuron->weights[i] = layer_weights[i][j];
            }
            neuron->bias = layer_biases[j];
        }
    }
    in.close();
}
7 changes: 5 additions & 2 deletions NeuralNetwork.h
Original file line number Diff line number Diff line change
Expand Up @@ -2,18 +2,21 @@
#define PEURAL_NEURALNETWORK_H

#include <vector>
#include <string>
#include "NeuralLayer.h"

/// A simple feed-forward neural network: an ordered list of fully-connected
/// layers; evaluate() feeds each layer's output into the next.
class NeuralNetwork {

public:
    // Layers in forward order (layers[0] receives the network input).
    std::vector<NeuralLayer> layers;

    /// Append a layer whose input size is the previous layer's output size.
    /// Precondition: at least one layer already exists.
    void addNeuralLayer(int layer_size, double (*activation)(double));

    /// Append a layer with an explicit input size (use for the first layer).
    void addNeuralLayer(int input_size, int layer_size, double (*activation)(double));

    /// Run the network on `input`; returns a heap-allocated array of the
    /// final layer's outputs — caller is responsible for delete[].
    double *evaluate(double *input);

    /// Populate every layer's weights and biases from the text file at `path`.
    void loadParameters(const std::string path);
};


Expand Down
9 changes: 9 additions & 0 deletions Neuron.cpp
Original file line number Diff line number Diff line change
@@ -1,6 +1,15 @@
#include <iostream>
#include "Neuron.h"

/// A neuron holding one weight per input plus a scalar bias.
/// Weights are value-initialized to 0.0 — the original left the array
/// uninitialized, so forward() before loadParameters() read
/// indeterminate values (undefined behavior).
Neuron::Neuron(int input_size) : input_size(input_size) {
    weights = new double[input_size]();
    bias = 0;
}

/// Weighted sum (dot product) of `input` with this neuron's weights.
/// `input` must have at least `input_size` elements.
double Neuron::forward(double *input) {
    double sum = 0;
    for (int i = 0; i < input_size; i++) {
        sum += weights[i] * input[i];
    }
    // The original ended with `return 0;`, discarding the computed sum and
    // silencing the entire network.
    return sum;
}
2 changes: 2 additions & 0 deletions Neuron.h
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,8 @@ class Neuron {
double bias;

Neuron(int input_size);

double forward(double *input);
};


Expand Down
8 changes: 5 additions & 3 deletions main.cpp
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@

#include <iostream>
#include "NeuralNetwork.h"
#include "Activation.h"

Expand Down Expand Up @@ -55,10 +56,11 @@ int main(void) {
// Constructing the neural network
auto *network = new NeuralNetwork();
network->addNeuralLayer(INPUT_SIZE, HIDDEN_LAYER_1_SIZE, Activation::relu);
network->addNeuralLayer(HIDDEN_LAYER_2_SIZE, Activation::relu);
network->addNeuralLayer(OUTPUT_SIZE, Activation::softmax);
network->addNeuralLayer(HIDDEN_LAYER_1_SIZE, HIDDEN_LAYER_2_SIZE, Activation::relu);
network->addNeuralLayer(HIDDEN_LAYER_2_SIZE, OUTPUT_SIZE, Activation::softmax);

// TODO: Load weights
network->loadParameters("network.txt");
std::cout << "loaded params" << std::endl;

// Evaluating the neural network with a set of inputs
auto *evaluationInput = new double[INPUT_SIZE] { 1.5, 1.2, 5.3, 7.3 };
Expand Down

0 comments on commit 1ca74dc

Please sign in to comment.