Skip to content

Commit

Permalink
One-v-Rest Classification technique applied to linear classifiers (#54)
Browse files Browse the repository at this point in the history
* One-v-Rest Classification technique applied to linear classifiers

* Fix for Apriori

* Fixes for One-v-Rest

* One-v-Rest test cases
  • Loading branch information
MustafaKarabulut authored and akondas committed Mar 5, 2017
1 parent 63c63df commit 01bb82a
Show file tree
Hide file tree
Showing 9 changed files with 409 additions and 50 deletions.
5 changes: 0 additions & 5 deletions src/Phpml/Classification/DecisionTree.php
Original file line number Diff line number Diff line change
Expand Up @@ -16,11 +16,6 @@ class DecisionTree implements Classifier
const CONTINUOS = 1;
const NOMINAL = 2;

/**
* @var array
*/
private $samples = [];

/**
* @var array
*/
Expand Down
135 changes: 101 additions & 34 deletions src/Phpml/Classification/Linear/DecisionStump.php
Original file line number Diff line number Diff line change
Expand Up @@ -5,13 +5,13 @@
namespace Phpml\Classification\Linear;

use Phpml\Helper\Predictable;
use Phpml\Helper\Trainable;
use Phpml\Helper\OneVsRest;
use Phpml\Classification\WeightedClassifier;
use Phpml\Classification\DecisionTree;

class DecisionStump extends WeightedClassifier
{
use Trainable, Predictable;
use Predictable, OneVsRest;

const AUTO_SELECT = -1;

Expand All @@ -20,6 +20,10 @@ class DecisionStump extends WeightedClassifier
*/
protected $givenColumnIndex;

/**
* @var array
*/
protected $binaryLabels;

/**
* Sample weights : If used the optimization on the decision value
Expand Down Expand Up @@ -57,10 +61,22 @@ class DecisionStump extends WeightedClassifier
*/
protected $columnTypes;

/**
* @var int
*/
protected $featureCount;

/**
* @var float
*/
protected $numSplitCount = 10.0;
protected $numSplitCount = 100.0;

/**
* Distribution of samples in the leaves
*
* @var array
*/
protected $prob;

/**
* A DecisionStump classifier is a one-level deep DecisionTree. It is generally
Expand All @@ -81,20 +97,15 @@ public function __construct(int $columnIndex = self::AUTO_SELECT)
* @param array $samples
* @param array $targets
*/
public function train(array $samples, array $targets)
protected function trainBinary(array $samples, array $targets)
{
$this->samples = array_merge($this->samples, $samples);
$this->targets = array_merge($this->targets, $targets);

// DecisionStump is capable of classifying between two classes only
$labels = array_count_values($this->targets);
$this->labels = array_keys($labels);
if (count($this->labels) != 2) {
throw new \Exception("DecisionStump can classify between two classes only:" . implode(',', $this->labels));
}
$this->binaryLabels = array_keys(array_count_values($this->targets));
$this->featureCount = count($this->samples[0]);

// If a column index is given, it should be among the existing columns
if ($this->givenColumnIndex > count($samples[0]) - 1) {
if ($this->givenColumnIndex > count($this->samples[0]) - 1) {
$this->givenColumnIndex = self::AUTO_SELECT;
}

Expand All @@ -106,22 +117,23 @@ public function train(array $samples, array $targets)
throw new \Exception("Number of sample weights does not match with number of samples");
}
} else {
$this->weights = array_fill(0, count($samples), 1);
$this->weights = array_fill(0, count($this->samples), 1);
}

// Determine type of each column as either "continuous" or "nominal"
$this->columnTypes = DecisionTree::getColumnTypes($this->samples);

// Try to find the best split in the columns of the dataset
// by calculating error rate for each split point in each column
$columns = range(0, count($samples[0]) - 1);
$columns = range(0, count($this->samples[0]) - 1);
if ($this->givenColumnIndex != self::AUTO_SELECT) {
$columns = [$this->givenColumnIndex];
}

$bestSplit = [
'value' => 0, 'operator' => '',
'column' => 0, 'trainingErrorRate' => 1.0];
'prob' => [], 'column' => 0,
'trainingErrorRate' => 1.0];
foreach ($columns as $col) {
if ($this->columnTypes[$col] == DecisionTree::CONTINUOS) {
$split = $this->getBestNumericalSplit($col);
Expand Down Expand Up @@ -164,6 +176,10 @@ public function setNumericalSplitCount(float $count)
protected function getBestNumericalSplit(int $col)
{
$values = array_column($this->samples, $col);
// Trying all possible points may be accomplished in two general ways:
// 1- Try all values in the $samples array ($values)
// 2- Artificially split the range of values into several parts and try them
// We choose the second one because it is faster in larger datasets
$minValue = min($values);
$maxValue = max($values);
$stepSize = ($maxValue - $minValue) / $this->numSplitCount;
Expand All @@ -174,19 +190,21 @@ protected function getBestNumericalSplit(int $col)
// Before trying all possible split points, let's first try
// the average value for the cut point
$threshold = array_sum($values) / (float) count($values);
$errorRate = $this->calculateErrorRate($threshold, $operator, $values);
list($errorRate, $prob) = $this->calculateErrorRate($threshold, $operator, $values);
if ($split == null || $errorRate < $split['trainingErrorRate']) {
$split = ['value' => $threshold, 'operator' => $operator,
'column' => $col, 'trainingErrorRate' => $errorRate];
'prob' => $prob, 'column' => $col,
'trainingErrorRate' => $errorRate];
}

// Try other possible points one by one
for ($step = $minValue; $step <= $maxValue; $step+= $stepSize) {
$threshold = (float)$step;
$errorRate = $this->calculateErrorRate($threshold, $operator, $values);
list($errorRate, $prob) = $this->calculateErrorRate($threshold, $operator, $values);
if ($errorRate < $split['trainingErrorRate']) {
$split = ['value' => $threshold, 'operator' => $operator,
'column' => $col, 'trainingErrorRate' => $errorRate];
'prob' => $prob, 'column' => $col,
'trainingErrorRate' => $errorRate];
}
}// for
}
Expand All @@ -210,11 +228,12 @@ protected function getBestNominalSplit(int $col)

foreach (['=', '!='] as $operator) {
foreach ($distinctVals as $val) {
$errorRate = $this->calculateErrorRate($val, $operator, $values);
list($errorRate, $prob) = $this->calculateErrorRate($val, $operator, $values);

if ($split == null || $split['trainingErrorRate'] < $errorRate) {
$split = ['value' => $val, 'operator' => $operator,
'column' => $col, 'trainingErrorRate' => $errorRate];
'prob' => $prob, 'column' => $col,
'trainingErrorRate' => $errorRate];
}
}// for
}
Expand All @@ -238,9 +257,9 @@ protected function evaluate($leftValue, $operator, $rightValue)
case '>=': return $leftValue >= $rightValue;
case '<': return $leftValue < $rightValue;
case '<=': return $leftValue <= $rightValue;
case '=': return $leftValue == $rightValue;
case '=': return $leftValue === $rightValue;
case '!=':
case '<>': return $leftValue != $rightValue;
case '<>': return $leftValue !== $rightValue;
}

return false;
Expand All @@ -253,42 +272,90 @@ protected function evaluate($leftValue, $operator, $rightValue)
* @param float $threshold
* @param string $operator
* @param array $values
*
* @return array
*/
protected function calculateErrorRate(float $threshold, string $operator, array $values)
{
$total = (float) array_sum($this->weights);
$wrong = 0.0;
$leftLabel = $this->labels[0];
$rightLabel= $this->labels[1];
$prob = [];
$leftLabel = $this->binaryLabels[0];
$rightLabel= $this->binaryLabels[1];

foreach ($values as $index => $value) {
if ($this->evaluate($threshold, $operator, $value)) {
if ($this->evaluate($value, $operator, $threshold)) {
$predicted = $leftLabel;
} else {
$predicted = $rightLabel;
}

if ($predicted != $this->targets[$index]) {
$target = $this->targets[$index];
if (strval($predicted) != strval($this->targets[$index])) {
$wrong += $this->weights[$index];
}

if (! isset($prob[$predicted][$target])) {
$prob[$predicted][$target] = 0;
}
$prob[$predicted][$target]++;
}

return $wrong / $total;
// Calculate probabilities: Proportion of labels in each leaf
$dist = array_combine($this->binaryLabels, array_fill(0, 2, 0.0));
foreach ($prob as $leaf => $counts) {
$leafTotal = (float)array_sum($prob[$leaf]);
foreach ($counts as $label => $count) {
if (strval($leaf) == strval($label)) {
$dist[$leaf] = $count / $leafTotal;
}
}
}

return [$wrong / (float) array_sum($this->weights), $dist];
}

/**
 * Estimates the probability that $sample belongs to $label.
 *
 * The estimate is the proportion of training samples carrying $label
 * within the decision leaf that $sample falls into; a sample predicted
 * to belong to the other class gets a probability of 0.0.
 *
 * @param array $sample
 * @param mixed $label
 *
 * @return float
 */
protected function predictProbability(array $sample, $label)
{
    // Only the leaf the sample lands in contributes a non-zero probability.
    if (strval($this->predictSampleBinary($sample)) != strval($label)) {
        return 0.0;
    }

    return $this->prob[$label];
}

/**
* @param array $sample
*
* @return mixed
*/
protected function predictSample(array $sample)
protected function predictSampleBinary(array $sample)
{
if ($this->evaluate($this->value, $this->operator, $sample[$this->column])) {
return $this->labels[0];
if ($this->evaluate($sample[$this->column], $this->operator, $this->value)) {
return $this->binaryLabels[0];
}
return $this->labels[1];

return $this->binaryLabels[1];
}

/**
* @return string
*/
public function __toString()
{
return "$this->column $this->operator $this->value";
return "IF $this->column $this->operator $this->value " .
"THEN " . $this->binaryLabels[0] . " ".
"ELSE " . $this->binaryLabels[1];
}
}
55 changes: 46 additions & 9 deletions src/Phpml/Classification/Linear/Perceptron.php
Original file line number Diff line number Diff line change
Expand Up @@ -5,12 +5,13 @@
namespace Phpml\Classification\Linear;

use Phpml\Helper\Predictable;
use Phpml\Helper\OneVsRest;
use Phpml\Classification\Classifier;
use Phpml\Preprocessing\Normalizer;

class Perceptron implements Classifier
{
use Predictable;
use Predictable, OneVsRest;

/**
* The function whose result will be used to calculate the network error
Expand Down Expand Up @@ -114,7 +115,7 @@ public function setChangeThreshold(float $threshold = 1e-5)
* @param array $samples
* @param array $targets
*/
public function train(array $samples, array $targets)
public function trainBinary(array $samples, array $targets)
{
$this->labels = array_keys(array_count_values($targets));
if (count($this->labels) > 2) {
Expand All @@ -128,7 +129,7 @@ public function train(array $samples, array $targets)
// Set all target values to either -1 or 1
$this->labels = [1 => $this->labels[0], -1 => $this->labels[1]];
foreach ($targets as $target) {
$this->targets[] = $target == $this->labels[1] ? 1 : -1;
$this->targets[] = strval($target) == strval($this->labels[1]) ? 1 : -1;
}

// Set samples and feature count vars
Expand Down Expand Up @@ -213,6 +214,25 @@ function ($w1, $w2) {
return false;
}

/**
 * Returns the sample after applying the configured normalizer, or the
 * sample unchanged when no normalizer is set.
 *
 * The normalizer's transform() operates on a batch, so the single sample
 * is wrapped in an array and unwrapped afterwards.
 *
 * @param array $sample
 *
 * @return array
 */
protected function checkNormalizedSample(array $sample)
{
    if (!$this->normalizer) {
        return $sample;
    }

    $batch = [$sample];
    $this->normalizer->transform($batch);

    return $batch[0];
}

/**
* Calculates net output of the network as a float value for the given input
*
Expand Down Expand Up @@ -244,17 +264,34 @@ protected function outputClass(array $sample)
return $this->output($sample) > 0 ? 1 : -1;
}

/**
 * Returns the probability of the sample belonging to the given label.
 *
 * The probability is taken as the absolute distance of the (normalized)
 * sample to the decision plane; a sample predicted to belong to the
 * other class gets a probability of 0.0.
 *
 * @param array $sample
 * @param mixed $label
 *
 * @return float
 */
protected function predictProbability(array $sample, $label)
{
    // Samples classified as the other label contribute no probability mass.
    if (strval($this->predictSampleBinary($sample)) != strval($label)) {
        return 0.0;
    }

    return abs($this->output($this->checkNormalizedSample($sample)));
}

/**
* @param array $sample
* @return mixed
*/
protected function predictSample(array $sample)
protected function predictSampleBinary(array $sample)
{
if ($this->normalizer) {
$samples = [$sample];
$this->normalizer->transform($samples);
$sample = $samples[0];
}
$sample = $this->checkNormalizedSample($sample);

$predictedClass = $this->outputClass($sample);

Expand Down
Loading

0 comments on commit 01bb82a

Please sign in to comment.