Commit 66ff9b9

Add a function to get the cost of the test/confirmation data and a field for the test data in the training_data.toml
Eliyaan committed Sep 21, 2023
1 parent 454a393 commit 66ff9b9
Showing 7 changed files with 58 additions and 27 deletions.
16 changes: 7 additions & 9 deletions example.v
@@ -17,17 +17,15 @@ fn main() {
neunet.init()
neunet.load_dataset("training_data.toml")

-print(neunet.fprop_value([0.0, 0.0]))
-print(neunet.fprop_value([0.0, 1.0]))
-print(neunet.fprop_value([1.0, 0.0]))
-print(neunet.fprop_value([1.0, 1.0]))


+neunet.test_unseen_data()

neunet.train_backprop(2000)

+neunet.test_unseen_data()

-print(neunet.fprop_value([0.0, 0.0]))
-print(neunet.fprop_value([0.0, 1.0]))
-print(neunet.fprop_value([1.0, 0.0]))
-print(neunet.fprop_value([1.0, 1.0]))
+print(neunet.fprop_value(neunet.test_inputs[0]))
+print(neunet.fprop_value(neunet.test_inputs[1]))
+print(neunet.fprop_value(neunet.test_inputs[2]))
+print(neunet.fprop_value(neunet.test_inputs[3]))
}
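
With this change the example reports the confirmation cost once before and once after the 2000 training epochs, and prints the outputs for the test inputs loaded from training_data.toml instead of hard-coded XOR values.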
6 changes: 3 additions & 3 deletions nn_save[2, 3, 1].nntoml
@@ -1,3 +1,3 @@
-cost=0.001372237961302231
-weights=[[[2.78830558129151, -2.4290288975786414, 3.0675996328846584], [-2.794921461679556, 2.372315634179421, -2.931212183591993]], [[2.8886993342368115], [-2.0976613819415344], [-4.791614782551079]]]
-biases=[[0.32667762528891187, 0.6403264422326904], [-1.3714550275472297, 1.2094593375604967, 1.3423211396810166], [4.340389299040728]]
+cost=0.0012693678610653342
+weights=[[[-3.3873047046207003, -2.8079573507983713, 1.307452493405567], [2.6111592840483384, -2.525466203168446, -3.1514551517117995]], [[4.3859346415228515], [-3.8811266953690287], [4.388845399680023]]]
+biases=[[0.189372342136767, -0.9216008188524993], [-1.130482105521725, 0.6176617432504918, 0.03966044092452365], [0.6794390698715209]]
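The save file stores the best cost reached so far together with every weight and bias, laid out in the same shapes as the network's weights_list ([layer][input_neuron][output_neuron]) and its per-layer neuron biases ([layer][neuron]).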
16 changes: 8 additions & 8 deletions preceptron/activations.v
@@ -7,43 +7,43 @@ Different activation functions and their derivatives
*/

[inline]
-fn tanh(value f64) f64 {
+pub fn tanh(value f64) f64 {
return (m.exp(value) - m.exp(-value)) / (m.exp(value) + m.exp(-value))
}

[inline]
-fn dtanh(value f64) f64 {
+pub fn dtanh(value f64) f64 {
val := tanh(value)
return 1 - val * val
}

[inline]
-fn relu(value f64) f64 {
+pub fn relu(value f64) f64 {
return if value < 0 { 0 } else { value }
}

[inline]
-fn drelu(value f64) f64 {
+pub fn drelu(value f64) f64 {
return if value < 0 { 0.0 } else { 1.0 }
}

[inline]
-fn leaky_relu(value f64) f64 {
+pub fn leaky_relu(value f64) f64 {
return if value < 0 { value * 0.01 } else { value }
}

[inline]
-fn dleaky_relu(value f64) f64 {
+pub fn dleaky_relu(value f64) f64 {
return if value < 0 { 0.01 } else { 1.0 }
}

[inline]
-fn sigmoid(value f64) f64 {
+pub fn sigmoid(value f64) f64 {
return 1 / (1 + m.exp(-value))
}

[inline]
-fn dsig(value f64) f64 {
+pub fn dsig(value f64) f64 {
sigx := sigmoid(value)
return sigx * (1 - sigx)
}
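
Since the activation functions and their derivatives are now public, they can be called from outside the module. A minimal sketch, assuming the module is importable as preceptron (printed values are rounded):

import preceptron

fn main() {
	x := 0.5
	println(preceptron.tanh(x)) // ~0.4621
	println(preceptron.dtanh(x)) // 1 - tanh(x)^2 ~0.7864
	println(preceptron.sigmoid(x)) // ~0.6225
	println(preceptron.dsig(x)) // sigmoid(x) * (1 - sigmoid(x)) ~0.2350
}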
8 changes: 4 additions & 4 deletions preceptron/backprop.v
@@ -46,7 +46,7 @@ pub fn (mut nn NeuralNetwork) train_backprop(nb_epochs u64) {
println('____________________________________________________________\nFinal Results: \nCost: ${nn.global_cost}')
}
if need_to_save && nn.save_path != '' {
-println(' Saving the progress !')
+println('Saving the progress !')
file := 'cost=${cost_to_save}\nweights=${get_weights(weights_to_save)}\nbiases=${get_biases(layers_to_save)}'
os.write_file(nn.save_path + nn.nb_neurons.str() + '.nntoml', file) or { panic(err) }
}
@@ -56,7 +56,7 @@ pub fn (mut nn NeuralNetwork) train_backprop(nb_epochs u64) {
Calculates the cost of each weight and bias
*/
//[direct_array_access]
-fn (mut nn NeuralNetwork) backprop(index int) {
+pub fn (mut nn NeuralNetwork) backprop(index int) {
nn.fprop_value(nn.training_inputs[index])

// Cost for the print
@@ -120,7 +120,7 @@ fn (mut nn NeuralNetwork) backprop(index int) {
/*
Apply the modifications based on the cost calculated in the backprop
*/
-fn (mut nn NeuralNetwork) apply_delta() {
+pub fn (mut nn NeuralNetwork) apply_delta() {
// Weights
for mut layer in nn.weights_list {
for mut weight_list in layer {
@@ -143,7 +143,7 @@ fn (mut nn NeuralNetwork) apply_delta() {
/*
Reset the costs that aren't reset in the backprop
*/
-fn (mut nn NeuralNetwork) neurons_costs_reset() {
+pub fn (mut nn NeuralNetwork) neurons_costs_reset() {
for mut layer in nn.layers_list[1..] {
for mut neuron in layer {
neuron.cost = 0.0
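With backprop, apply_delta and neurons_costs_reset exported, a training epoch can also be driven by hand. A hedged sketch of one epoch, mirroring what train_backprop appears to do internally (the struct field values here are illustrative assumptions, not the repo's defaults):

import preceptron

fn main() {
	mut nn := preceptron.NeuralNetwork{
		learning_rate: 0.5
		nb_neurons: [2, 3, 1]
		activ_funcs: [preceptron.tanh, preceptron.tanh]
		deriv_activ_funcs: [preceptron.dtanh, preceptron.dtanh]
	}
	nn.init()
	nn.load_dataset('training_data.toml')
	for i in 0 .. nn.training_inputs.len {
		nn.backprop(i) // accumulate the cost of every weight and bias for one sample
	}
	nn.apply_delta() // apply the accumulated modifications
	nn.neurons_costs_reset() // clear the neuron costs the backprop does not reset
	nn.test_unseen_data()
}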
33 changes: 32 additions & 1 deletion preceptron/main_preceptron.v
@@ -25,6 +25,7 @@ mut:
}

pub struct NeuralNetwork {
+pub:
learning_rate f64
nb_neurons []int
activ_funcs []fn (f64) f64
@@ -35,7 +36,7 @@ pub struct NeuralNetwork {
load_path string

deriv_activ_funcs []fn (f64) f64
-mut:
+pub mut:
// [layer_nbr][input_neuron_nbr][output_neuron_nbr]
weights_list [][][]Weight
// [layer_nbr][neuron_nbr]
Expand All @@ -44,6 +45,8 @@ mut:
global_cost f64
training_inputs [][]f64
expected_training_outputs [][]f64
+test_inputs [][]f64
+expected_test_outputs [][]f64
best_cost f64 = 100000000000
}

@@ -101,6 +104,8 @@ pub fn (mut nn NeuralNetwork) load_dataset(name string) {
file := toml.parse_file(name) or {panic(err)}
base_t_i_list := file.value("training_inputs").array()
base_e_t_o_list := file.value("expected_training_outputs").array()
base_test_i_list := file.value("test_inputs").array()
base_e_test_o_list := file.value("expected_test_outputs").array()

nn.training_inputs = [][]f64{}
nn.expected_training_outputs = [][]f64{}
@@ -116,6 +121,21 @@ pub fn (mut nn NeuralNetwork) load_dataset(name string) {
nn.expected_training_outputs[i] << value.f64()
}
}
+
+nn.test_inputs = [][]f64{}
+nn.expected_test_outputs = [][]f64{}
+for i, test_i in base_test_i_list{
+nn.test_inputs << []f64{}
+for value in test_i.array(){
+nn.test_inputs[i] << value.f64()
+}
+}
+for i, e_test_o in base_e_test_o_list{
+nn.expected_test_outputs << []f64{}
+for value in e_test_o.array(){
+nn.expected_test_outputs[i] << value.f64()
+}
+}
}

/*
@@ -143,6 +163,17 @@ pub fn (mut nn NeuralNetwork) fprop_value(inputs []f64) []f64 {
return get_outputs(nn.layers_list[nn.nb_neurons.len - 1])
}
+
+pub fn (mut nn NeuralNetwork) test_unseen_data(){
+nn.global_cost = 0
+for index, inputs in nn.test_inputs{
+for i, output in nn.fprop_value(inputs) { // for each output
+tmp := output - nn.expected_test_outputs[index][i]
+nn.global_cost += tmp * tmp
+}
+}
+println("\nTest cost: ${nn.global_cost}")
+}

/*
Input : Neuron array
Output : The outputs of the neuron array
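test_unseen_data forward-propagates every test sample and accumulates the squared error of each output into global_cost, so a perfect network scores 0. A standalone sketch of the same arithmetic, using made-up predictions:

fn main() {
	predictions := [[0.1], [0.9], [0.9], [0.1]]
	expected := [[0.0], [1.0], [1.0], [0.0]]
	mut cost := 0.0
	for i, outs in predictions {
		for j, out in outs {
			tmp := out - expected[i][j]
			cost += tmp * tmp
		}
	}
	println('Test cost: ${cost}') // 4 * 0.01 = 0.04
}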
2 changes: 1 addition & 1 deletion preceptron/secondary_functions.v
@@ -10,7 +10,7 @@ Other useful functions
To initialise the nn with random weights and biases
*/
[direct_array_access; inline]
-fn (mut nn NeuralNetwork) set_rd_wb_values() {
+pub fn (mut nn NeuralNetwork) set_rd_wb_values() {
// Weights
for mut layer in nn.weights_list {
for mut weights_list in layer {
4 changes: 3 additions & 1 deletion training_data.toml
@@ -1,2 +1,4 @@
training_inputs=[[0.0, 0.0], [0.0, 1.0], [1.0, 0.0], [1.0, 1.0]]
-expected_training_outputs=[[0.0], [1.0], [1.0], [0.0]]
+expected_training_outputs=[[0.0], [1.0], [1.0], [0.0]]
+test_inputs=[[0.0, 0.0], [0.0, 1.0], [1.0, 0.0], [1.0, 1.0]]
+expected_test_outputs=[[0.0], [1.0], [1.0], [0.0]]
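
Because XOR only has four possible inputs, the confirmation set here necessarily repeats the training rows. For a problem with more data, the new fields allow a genuine holdout; a hypothetical layout using the same field names:

# hypothetical split: the test row does not appear among the training rows
training_inputs=[[0.0, 0.0], [0.0, 1.0], [1.0, 0.0]]
expected_training_outputs=[[0.0], [1.0], [1.0]]
test_inputs=[[1.0, 1.0]]
expected_test_outputs=[[0.0]]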
