feedforward.phi
#!/usr/bin/phi run
/* TODO: modules (i.e. namespaces), this might be a good syntax?
feedforward :: module {
    hidden_node :: struct {
        output: float;
        bias: float;
        biasDelta: float;
        weights: [1]float;
        weightDeltas: [1]float;
    }
    output_node :: struct {
        output: float;
        error: float;
        bias: float;
        biasDelta: float;
        weights: [2]float;
        weightDeltas: [2]float;
    }
    network :: struct {
        learningRate: float;
        inputs: [1]input_node;
        hidden: [2]hidden_node;
        outputs: [1]output_node;
    }
}
*/
// Weighted sum of a node's inputs: the dot product of an input vector and a weight vector.
netInput :: func(inputs: []float, weights: []float) -> float {
    result := 0.0;
    for input, index in inputs:
        result += input * weights[index];
    return result;
}
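// A quick worked example (illustrative values, not from the training data):
//   netInput([0.5, 0.25], [0.2, 0.4]) = 0.5*0.2 + 0.25*0.4 = 0.2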
// Backpropagated error for a tanh node: the weighted sum of downstream errors,
// scaled by the tanh derivative, which is 1 - tanh(x)^2 = 1 - output^2.
tanhError :: func(output: float, errors: []float, weights: []float) -> float {
    result := 0.0;
    for error, index in errors:
        result += error * weights[index];
    return result * (1 - output * output);
}
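// Worked example (illustrative values): for output = 0.5, errors = [0.2],
// weights = [0.3]:
//   tanhError = (0.2 * 0.3) * (1 - 0.5*0.5) = 0.06 * 0.75 = 0.045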
hidden_node :: struct {
    output: float;
    bias: float;
    biasDelta: float;
    weights: [1]float;
    weightDeltas: [1]float;
}
output_node :: struct {
    bias: float;
    biasDelta: float;
    weights: [2]float;
    weightDeltas: [2]float;
}
// a feedforward neural network (1 input -> 2 tanh hidden -> 1 identity output)
// with backpropagation and online learning
feedforward_network :: struct {
    learningRate: float;
    hidden: [2]hidden_node;
    outputs: [1]output_node;
}
trainFeedforward :: func(net: ^feedforward_network, inputs: [1]float, outputs: [1]float) {
    hiddenOutputs : [2]float;
    for ^node, index in net.hidden {
        // 1. Compute net input and output for hidden nodes (tanh)
        // TODO: intrinsics, modulize math?, etc
        node.output = math_tanh(netInput(inputs, node.weights));
        hiddenOutputs[index] = node.output;
    }
    outputErrors : [1]float;
    for ^node, index in net.outputs {
        // 2. Compute net input and output for output nodes (identity);
        //    the output layer consumes the hidden outputs, not the raw inputs
        output := netInput(hiddenOutputs, node.weights);
        // 3. Compute error for output nodes
        error := outputs[index] - output;
        outputErrors[index] = error;
        // 4. Update weights for output nodes (online learning)
        node.biasDelta = node.biasDelta + error;
        node.bias = node.bias + (node.biasDelta * net.learningRate);
        for weight, wIndex in node.weights {
            weightDelta := node.weightDeltas[wIndex] + (error * hiddenOutputs[wIndex]);
            node.weights[wIndex] = weight + (weightDelta * net.learningRate);
            node.weightDeltas[wIndex] = weightDelta;
        }
    }
    for ^node, index in net.hidden {
        // 5. Compute error for hidden nodes: backpropagate the output errors
        //    through this node's outgoing (hidden -> output) weights
        outgoingWeights : [1]float;
        for outNode, outIndex in net.outputs:
            outgoingWeights[outIndex] = outNode.weights[index];
        error := tanhError(node.output, outputErrors, outgoingWeights);
        // 6. Update weights for hidden nodes (online learning)
        node.biasDelta = node.biasDelta + error;
        node.bias = node.bias + (node.biasDelta * net.learningRate);
        for weight, wIndex in node.weights {
            weightDelta := node.weightDeltas[wIndex] + (error * inputs[wIndex]);
            node.weights[wIndex] = weight + (weightDelta * net.learningRate);
            node.weightDeltas[wIndex] = weightDelta;
        }
    }
}
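/* Shape summary of one online training step on this 1-2-1 network, restating
   the code above (the `outputs` parameter holds the target values):
     forward:  hiddenOutputs[j] = tanh(sum_i inputs[i] * hidden[j].weights[i])
               predicted[k]     = sum_j hiddenOutputs[j] * net.outputs[k].weights[j]
     backward: outputErrors[k]  = outputs[k] - predicted[k]
               hiddenError[j]   = (sum_k outputErrors[k] * net.outputs[k].weights[j])
                                  * (1 - hiddenOutputs[j]^2)
   The weight and bias deltas accumulate across calls, so each online update
   also carries a momentum-like contribution from earlier examples. */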
example_pattern :: struct {
    inputs: []float;
    outputs: []float;
}
/* not really sure what the best syntax for array and struct "literals" is
examples : []example_pattern = [
    {[ 0.4,  0.1,  0.2, 0.7, 0.3], [0.1, 0.2, 0.3, 0.4, 0.5]},
    {[0.25, 0.35, 0.45, 0.6, 1.1], [0.1, 0.2, 0.3, 0.4, 0.5]},
];
*/
examples : []example_pattern;
main :: func() {
    neuralNet : feedforward_network;
    for example in examples:
        trainFeedforward(^neuralNet, example.inputs, example.outputs);
}