Skip to content

Commit

Permalink
Add ThresholdedReLU activation function (#129)
Browse files Browse the repository at this point in the history
  • Loading branch information
marmichalski authored and akondas committed Sep 2, 2017
1 parent cacfd64 commit 0e59cfb
Show file tree
Hide file tree
Showing 2 changed files with 71 additions and 0 deletions.
33 changes: 33 additions & 0 deletions src/Phpml/NeuralNetwork/ActivationFunction/ThresholdedReLU.php
Original file line number Diff line number Diff line change
@@ -0,0 +1,33 @@
<?php

declare(strict_types=1);

namespace Phpml\NeuralNetwork\ActivationFunction;

use Phpml\NeuralNetwork\ActivationFunction;

class ThresholdedReLU implements ActivationFunction
{
    /**
     * Activation threshold: inputs strictly greater than theta pass through
     * unchanged; everything else is mapped to 0.0.
     *
     * @var float
     */
    private $theta;

    /**
     * @param float $theta threshold below which (inclusive) the activation is zero
     */
    public function __construct(float $theta = 1.0)
    {
        $this->theta = $theta;
    }

    /**
     * Computes f(x) = x for x > theta, 0.0 otherwise.
     *
     * @param float|int $value
     *
     * @return float
     */
    public function compute($value): float
    {
        // int input is widened to float by the declared return type.
        return $value > $this->theta ? $value : 0.0;
    }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,38 @@
<?php

declare(strict_types=1);

namespace tests\Phpml\NeuralNetwork\ActivationFunction;

use Phpml\NeuralNetwork\ActivationFunction\ThresholdedReLU;
use PHPUnit\Framework\TestCase;

class ThresholdedReLUTest extends TestCase
{
    /**
     * Verifies that compute() passes values above theta through unchanged
     * and zeroes out values at or below theta.
     *
     * @param float $theta    activation threshold
     * @param float $expected expected activation output
     * @param float $value    input fed to compute()
     *
     * @dataProvider thresholdProvider
     */
    public function testThresholdedReLUActivationFunction(float $theta, float $expected, float $value)
    {
        $thresholdedReLU = new ThresholdedReLU($theta);

        // assertSame checks value AND type: compute() must return float.
        $this->assertSame($expected, $thresholdedReLU->compute($value));
    }

    /**
     * Each case is [theta, expected, value].
     *
     * @return array
     */
    public function thresholdProvider(): array
    {
        return [
            [1.0, 0.0, 1.0],   // value == theta: zeroed (strict > comparison)
            [0.5, 3.75, 3.75], // value above theta: passed through
            [0.0, 0.5, 0.5],   // theta 0: positive values pass
            [0.9, 0.0, 0.1],   // value below theta: zeroed
        ];
    }
}

0 comments on commit 0e59cfb

Please sign in to comment.