Skip to content

Commit

Permalink
Remove phpunit readAttributes deprecated methods (#372)
Browse files Browse the repository at this point in the history
  • Loading branch information
akondas authored Apr 12, 2019
1 parent db82afa commit f6aa1a5
Show file tree
Hide file tree
Showing 6 changed files with 35 additions and 22 deletions.
3 changes: 2 additions & 1 deletion ecs.yml
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,8 @@ services:
PhpCsFixer\Fixer\Operator\BinaryOperatorSpacesFixer:
align_double_arrow: false
align_equals: false

PhpCsFixer\Fixer\PhpUnit\PhpUnitTestCaseStaticMethodCallsFixer:
call_type: 'self'
# phpdoc
PhpCsFixer\Fixer\Phpdoc\PhpdocSeparationFixer: ~
PhpCsFixer\Fixer\Phpdoc\PhpdocAlignFixer: ~
Expand Down
5 changes: 5 additions & 0 deletions src/Helper/Optimizer/Optimizer.php
Original file line number Diff line number Diff line change
Expand Up @@ -48,6 +48,11 @@ public function setTheta(array $theta): self
return $this;
}

/**
 * Returns the current theta (weights) vector stored on this optimizer.
 */
public function theta(): array
{
    $weights = $this->theta;

    return $weights;
}

/**
* Executes the optimization with the given samples & targets
* and returns the weights
Expand Down
10 changes: 10 additions & 0 deletions src/NeuralNetwork/Network/MultilayerPerceptron.php
Original file line number Diff line number Diff line change
Expand Up @@ -129,6 +129,16 @@ public function getOutput(): array
return $result;
}

/**
 * Returns the learning rate currently stored on this network.
 */
public function getLearningRate(): float
{
    $rate = $this->learningRate;

    return $rate;
}

/**
 * Returns the Backpropagation instance held by this network.
 */
public function getBackpropagation(): Backpropagation
{
    $trainer = $this->backpropagation;

    return $trainer;
}

/**
* @param mixed $target
*/
Expand Down
5 changes: 5 additions & 0 deletions src/NeuralNetwork/Training/Backpropagation.php
Original file line number Diff line number Diff line change
Expand Up @@ -34,6 +34,11 @@ public function setLearningRate(float $learningRate): void
$this->learningRate = $learningRate;
}

/**
 * Returns the learning rate previously assigned via setLearningRate().
 */
public function getLearningRate(): float
{
    $rate = $this->learningRate;

    return $rate;
}

/**
* @param mixed $targetClass
*/
Expand Down
4 changes: 1 addition & 3 deletions tests/Helper/Optimizer/OptimizerTest.php
Original file line number Diff line number Diff line change
Expand Up @@ -26,9 +26,7 @@ public function testSetTheta(): void
$optimizer = $this->getMockForAbstractClass(Optimizer::class, [2]);
$object = $optimizer->setTheta([0.3, 1]);

$theta = self::getObjectAttribute($optimizer, 'theta');

self::assertSame($object, $optimizer);
self::assertSame([0.3, 1], $theta);
self::assertSame([0.3, 1], $object->theta());
}
}
30 changes: 12 additions & 18 deletions tests/NeuralNetwork/Network/MultilayerPerceptronTest.php
Original file line number Diff line number Diff line change
Expand Up @@ -55,14 +55,12 @@ public function testLearningRateSetter(): void
[5, [3], [0, 1], 1000, null, 0.42]
);

self::assertEquals(0.42, self::readAttribute($mlp, 'learningRate'));
$backprop = self::readAttribute($mlp, 'backpropagation');
self::assertEquals(0.42, self::readAttribute($backprop, 'learningRate'));
self::assertEquals(0.42, $mlp->getLearningRate());
self::assertEquals(0.42, $mlp->getBackpropagation()->getLearningRate());

$mlp->setLearningRate(0.24);
self::assertEquals(0.24, self::readAttribute($mlp, 'learningRate'));
$backprop = self::readAttribute($mlp, 'backpropagation');
self::assertEquals(0.24, self::readAttribute($backprop, 'learningRate'));
self::assertEquals(0.24, $mlp->getLearningRate());
self::assertEquals(0.24, $mlp->getBackpropagation()->getLearningRate());
}

public function testLearningRateSetterWithCustomActivationFunctions(): void
Expand All @@ -75,14 +73,12 @@ public function testLearningRateSetterWithCustomActivationFunctions(): void
[5, [[3, $activation_function], [5, $activation_function]], [0, 1], 1000, null, 0.42]
);

self::assertEquals(0.42, self::readAttribute($mlp, 'learningRate'));
$backprop = self::readAttribute($mlp, 'backpropagation');
self::assertEquals(0.42, self::readAttribute($backprop, 'learningRate'));
self::assertEquals(0.42, $mlp->getLearningRate());
self::assertEquals(0.42, $mlp->getBackpropagation()->getLearningRate());

$mlp->setLearningRate(0.24);
self::assertEquals(0.24, self::readAttribute($mlp, 'learningRate'));
$backprop = self::readAttribute($mlp, 'backpropagation');
self::assertEquals(0.24, self::readAttribute($backprop, 'learningRate'));
self::assertEquals(0.24, $mlp->getLearningRate());
self::assertEquals(0.24, $mlp->getBackpropagation()->getLearningRate());
}

public function testLearningRateSetterWithLayerObject(): void
Expand All @@ -95,14 +91,12 @@ public function testLearningRateSetterWithLayerObject(): void
[5, [new Layer(3, Neuron::class, $activation_function), new Layer(5, Neuron::class, $activation_function)], [0, 1], 1000, null, 0.42]
);

self::assertEquals(0.42, self::readAttribute($mlp, 'learningRate'));
$backprop = self::readAttribute($mlp, 'backpropagation');
self::assertEquals(0.42, self::readAttribute($backprop, 'learningRate'));
self::assertEquals(0.42, $mlp->getLearningRate());
self::assertEquals(0.42, $mlp->getBackpropagation()->getLearningRate());

$mlp->setLearningRate(0.24);
self::assertEquals(0.24, self::readAttribute($mlp, 'learningRate'));
$backprop = self::readAttribute($mlp, 'backpropagation');
self::assertEquals(0.24, self::readAttribute($backprop, 'learningRate'));
self::assertEquals(0.24, $mlp->getLearningRate());
self::assertEquals(0.24, $mlp->getBackpropagation()->getLearningRate());
}

/**
Expand Down

0 comments on commit f6aa1a5

Please sign in to comment.