refactor Backpropagation methods and simplify things
akondas committed Aug 10, 2016
1 parent 66d029e commit c506a84
Showing 3 changed files with 58 additions and 14 deletions.

src/Phpml/NeuralNetwork/Training/Backpropagation.php: 52 changes (39 additions, 13 deletions)
@@ -21,6 +21,11 @@ class Backpropagation implements Training
      */
     private $theta;
 
+    /**
+     * @var array
+     */
+    private $sigmas;
+
     /**
      * @param Network $network
      * @param int $theta
@@ -71,20 +76,22 @@ private function trainSamples(array $samples, array $targets, float $desiredError
         return $resultsWithinError;
     }
 
+    /**
+     * @param array $sample
+     * @param array $target
+     */
     private function trainSample(array $sample, array $target)
     {
         $this->network->setInput($sample)->getOutput();
+        $this->sigmas = [];
 
-        $sigmas = [];
         $layers = $this->network->getLayers();
         $layersNumber = count($layers);
 
         for ($i = $layersNumber; $i > 1; --$i) {
             foreach ($layers[$i - 1]->getNodes() as $key => $neuron) {
                 if ($neuron instanceof Neuron) {
-                    $neuronOutput = $neuron->getOutput();
-                    $sigma = $neuronOutput * (1 - $neuronOutput) * ($i == $layersNumber ? ($target[$key] - $neuronOutput) : $this->getPrevSigma($sigmas, $neuron));
-                    $sigmas[] = new Sigma($neuron, $sigma);
+                    $sigma = $this->getSigma($neuron, $target, $key, $i == $layersNumber);
                     foreach ($neuron->getSynapses() as $synapse) {
                         $synapse->changeWeight($this->theta * $sigma * $synapse->getNode()->getOutput());
                     }
@@ -94,21 +101,40 @@ private function trainSample(array $sample, array $target)
     }
 
     /**
-     * @param Sigma[] $sigmas
-     * @param Neuron $forNeuron
+     * @param Neuron $neuron
+     * @param array $target
+     * @param int $key
+     * @param bool $lastLayer
      *
      * @return float
      */
+    private function getSigma(Neuron $neuron, array $target, int $key, bool $lastLayer): float
+    {
+        $neuronOutput = $neuron->getOutput();
+        $sigma = $neuronOutput * (1 - $neuronOutput);
+
+        if ($lastLayer) {
+            $sigma *= ($target[$key] - $neuronOutput);
+        } else {
+            $sigma *= $this->getPrevSigma($neuron);
+        }
+
+        $this->sigmas[] = new Sigma($neuron, $sigma);
+
+        return $sigma;
+    }
+
+    /**
+     * @param Neuron $neuron
+     *
+     * @return float
+     */
-    private function getPrevSigma(array $sigmas, Neuron $forNeuron): float
+    private function getPrevSigma(Neuron $neuron): float
     {
         $sigma = 0.0;
 
-        foreach ($sigmas as $neuronSigma) {
-            foreach ($neuronSigma->getNeuron()->getSynapses() as $synapse) {
-                if ($synapse->getNode() == $forNeuron) {
-                    $sigma += $synapse->getWeight() * $neuronSigma->getSigma();
-                }
-            }
+        foreach ($this->sigmas as $neuronSigma) {
+            $sigma += $neuronSigma->getSigmaForNeuron($neuron);
         }
 
         return $sigma;
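
For reference, the values the refactored methods compute correspond to the standard delta rule for sigmoid units (a sketch of the underlying math, not part of the diff). Writing o_j for a neuron's output, t_j for its target, w_{jk} for the weight of the synapse carrying o_j into a downstream neuron k, and \theta for the learning rate stored in $theta:

    \delta_j = o_j \, (1 - o_j) \, (t_j - o_j)              % getSigma() with $lastLayer = true
    \delta_j = o_j \, (1 - o_j) \, \sum_k w_{jk} \delta_k   % getSigma() with $lastLayer = false
    \Delta w_{ij} = \theta \, \delta_j \, o_i               % synapse update in trainSample()

getPrevSigma() supplies the sum \sum_k w_{jk} \delta_k, now by asking each stored Sigma for its contribution via getSigmaForNeuron() instead of re-scanning every synapse inline.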

src/Phpml/NeuralNetwork/Training/Backpropagation/Sigma.php: 18 changes (18 additions, 0 deletions)
@@ -43,4 +43,22 @@ public function getSigma()
     {
         return $this->sigma;
     }
+
+    /**
+     * @param Neuron $neuron
+     *
+     * @return float
+     */
+    public function getSigmaForNeuron(Neuron $neuron): float
+    {
+        $sigma = 0.0;
+
+        foreach ($this->neuron->getSynapses() as $synapse) {
+            if ($synapse->getNode() == $neuron) {
+                $sigma += $synapse->getWeight() * $this->getSigma();
+            }
+        }
+
+        return $sigma;
+    }
 }
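
The new accessor moves the synapse lookup that used to sit inside Backpropagation::getPrevSigma() into the Sigma value object itself. A minimal sketch of the equivalence, assuming php-ml is autoloaded and the Neuron class lives in Phpml\NeuralNetwork\Node (the helper function below is illustrative, not code from the commit):

<?php

use Phpml\NeuralNetwork\Node\Neuron;
use Phpml\NeuralNetwork\Training\Backpropagation\Sigma;

// Old-style lookup expressed as a free function: walk the stored neuron's
// synapses and accumulate weight * sigma over the synapses fed by $neuron.
// For any $storedSigma this returns the same value as
// $storedSigma->getSigmaForNeuron($neuron).
function sigmaContribution(Sigma $storedSigma, Neuron $neuron): float
{
    $contribution = 0.0;

    foreach ($storedSigma->getNeuron()->getSynapses() as $synapse) {
        if ($synapse->getNode() == $neuron) {
            $contribution += $synapse->getWeight() * $storedSigma->getSigma();
        }
    }

    return $contribution;
}

Pushing this lookup into Sigma is what lets getPrevSigma() collapse to a single foreach over $this->sigmas.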

tests/Phpml/NeuralNetwork/Training/BackpropagationTest.php: 2 changes (1 addition, 1 deletion)
@@ -18,7 +18,7 @@ public function testBackpropagationForXORLearning()
             [[1, 0], [0, 1], [1, 1], [0, 0]],
             [[1], [1], [0], [0]],
             $desiredError = 0.2,
-            10000
+            30000
         );
 
         $this->assertEquals(0, $network->setInput([1, 1])->getOutput()[0], '', $desiredError);
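
For context, this is the call pattern the test exercises, shown end to end (an illustrative sketch, not part of the commit; the MultilayerPerceptron topology and namespaces are assumptions based on the library's documentation of this era):

<?php

require_once __DIR__.'/vendor/autoload.php'; // php-ml installed via Composer

use Phpml\NeuralNetwork\Network\MultilayerPerceptron;
use Phpml\NeuralNetwork\Training\Backpropagation;

// Assumed topology: 2 inputs, one hidden layer of 2 neurons, 1 output.
$network = new MultilayerPerceptron([2, 2, 1]);
$training = new Backpropagation($network);

// Samples, targets, acceptable error, maximum iterations (raised to 30000 here).
$training->train(
    [[1, 0], [0, 1], [1, 1], [0, 0]],
    [[1], [1], [0], [0]],
    0.2,
    30000
);

// After training, each output should land within 0.2 of its XOR target.
echo $network->setInput([0, 1])->getOutput()[0], PHP_EOL; // close to 1
echo $network->setInput([1, 1])->getOutput()[0], PHP_EOL; // close to 0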
