Skip to content

Commit

Permalink
Add PReLU activation function (#128)
Browse files Browse the repository at this point in the history
* Implement PReLU activation function

* Add PReLUTest
  • Loading branch information
marmichalski authored and akondas committed Sep 2, 2017
1 parent 0e59cfb commit b1be057
Show file tree
Hide file tree
Showing 2 changed files with 72 additions and 0 deletions.
33 changes: 33 additions & 0 deletions src/Phpml/NeuralNetwork/ActivationFunction/PReLU.php
Original file line number Diff line number Diff line change
@@ -0,0 +1,33 @@
<?php

declare(strict_types=1);

namespace Phpml\NeuralNetwork\ActivationFunction;

use Phpml\NeuralNetwork\ActivationFunction;

class PReLU implements ActivationFunction
{
    /**
     * Leak coefficient applied to negative inputs.
     *
     * @var float
     */
    private $beta;

    /**
     * @param float $beta slope used for negative inputs; 0.01 is the
     *                    conventional Leaky-ReLU/PReLU default
     */
    public function __construct(float $beta = 0.01)
    {
        $this->beta = $beta;
    }

    /**
     * Computes the PReLU activation: identity for non-negative inputs,
     * beta-scaled identity for negative inputs.
     *
     * $value is intentionally left untyped so both int and float are
     * accepted under strict_types, per the ActivationFunction interface;
     * an int return value is widened to float by the declared return type.
     *
     * @param float|int $value
     *
     * @return float
     */
    public function compute($value): float
    {
        return $value >= 0 ? $value : $this->beta * $value;
    }
}
39 changes: 39 additions & 0 deletions tests/Phpml/NeuralNetwork/ActivationFunction/PReLUTest.php
Original file line number Diff line number Diff line change
@@ -0,0 +1,39 @@
<?php

declare(strict_types=1);

namespace tests\Phpml\NeuralNetwork\ActivationFunction;

use Phpml\NeuralNetwork\ActivationFunction\PReLU;
use PHPUnit\Framework\TestCase;

class PReLUTest extends TestCase
{
    /**
     * Checks PReLU output against hand-computed values for a sampling
     * of slopes and inputs.
     *
     * @param float     $beta     leak coefficient handed to the PReLU constructor
     * @param float|int $expected activation value compute() should produce
     * @param float|int $value    raw input fed to compute()
     *
     * @dataProvider preluProvider
     */
    public function testPReLUActivationFunction($beta, $expected, $value)
    {
        $activationFunction = new PReLU($beta);
        $actual = $activationFunction->compute($value);

        // Float comparison with a small tolerance.
        $this->assertEquals($expected, $actual, '', 0.001);
    }

    /**
     * Data sets in the form [beta, expected, value].
     *
     * Non-negative inputs pass through unchanged; negative inputs
     * come back scaled by beta.
     *
     * @return array
     */
    public function preluProvider()
    {
        return [
            [0.01, 0.367, 0.367],
            [0.0, 1, 1],
            [0.3, -0.3, -1],
            [0.9, 3, 3],
            [0.02, -0.06, -3],
        ];
    }
}

0 comments on commit b1be057

Please sign in to comment.