Commit 0beb407b authored by Marcin Michalski, committed by Arkadiusz Kondas

Update easy coding standard to ^5.1 (#317)

parent 9c9705a3
......@@ -9,7 +9,8 @@ use Phpml\Helper\Trainable;
class Apriori implements Associator
{
use Trainable, Predictable;
use Trainable;
use Predictable;
public const ARRAY_KEY_ANTECEDENT = 'antecedent';
......
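Several of the classifiers touched in this commit get the same first change: a combined trait import is split into one use statement per trait. A minimal sketch of the two styles, using made-up stand-in traits rather than the real Phpml ones:

trait TrainableSketch
{
    public function train(array $samples, array $targets): void {}
}

trait PredictableSketch
{
    public function predict(array $sample): int { return 0; }
}

class CombinedImports
{
    use TrainableSketch, PredictableSketch;   // old style: several traits, one statement
}

class SplitImports
{
    use TrainableSketch;                      // new style: one trait per use statement,
    use PredictableSketch;                    // the style the updated standard appears to enforce
}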
......@@ -12,7 +12,8 @@ use Phpml\Math\Statistic\Mean;
class DecisionTree implements Classifier
{
use Trainable, Predictable;
use Trainable;
use Predictable;
public const CONTINUOUS = 1;
......@@ -31,7 +32,7 @@ class DecisionTree implements Classifier
/**
* @var DecisionTreeLeaf
*/
protected $tree = null;
protected $tree;
/**
* @var int
......@@ -219,10 +220,9 @@ class DecisionTree implements Classifier
// Normalize & sort the importances
$total = array_sum($this->featureImportances);
if ($total > 0) {
foreach ($this->featureImportances as &$importance) {
array_walk($this->featureImportances, function (&$importance) use ($total): void {
$importance /= $total;
}
});
arsort($this->featureImportances);
}
......
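The importance-normalization loop is rewritten from a by-reference foreach into array_walk; the same rewrite appears in RandomForest further down. A small self-contained sketch of the difference, assuming nothing beyond plain PHP:

$byRef = ['a' => 2.0, 'b' => 6.0];
$total = array_sum($byRef);

foreach ($byRef as &$importance) {
    $importance /= $total;
}
// $importance is still a reference to $byRef['b'] here; forgetting
// unset($importance) is a classic source of corrupted later loops.
unset($importance);

$walked = ['a' => 2.0, 'b' => 6.0];
array_walk($walked, function (&$importance) use ($total): void {
    $importance /= $total;   // the reference stays local to the callback
});

// Both arrays now hold ['a' => 0.25, 'b' => 0.75].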
......@@ -16,7 +16,8 @@ use ReflectionClass;
class AdaBoost implements Classifier
{
use Predictable, Trainable;
use Predictable;
use Trainable;
/**
* Actual labels given in the targets array
......
......@@ -13,7 +13,8 @@ use ReflectionClass;
class Bagging implements Classifier
{
use Trainable, Predictable;
use Trainable;
use Predictable;
/**
* @var int
......
......@@ -16,9 +16,9 @@ class RandomForest extends Bagging
protected $featureSubsetRatio = 'log';
/**
* @var array
* @var array|null
*/
protected $columnNames = null;
protected $columnNames;
/**
* Initializes RandomForest with the given number of trees. More trees
......@@ -53,7 +53,7 @@ class RandomForest extends Bagging
throw new InvalidArgumentException('When a float is given, feature subset ratio should be between 0.1 and 1.0');
}
if (is_string($ratio) && $ratio != 'sqrt' && $ratio != 'log') {
if (is_string($ratio) && $ratio !== 'sqrt' && $ratio !== 'log') {
throw new InvalidArgumentException("When a string is given, feature subset ratio can only be 'sqrt' or 'log'");
}
......@@ -69,7 +69,7 @@ class RandomForest extends Bagging
*/
public function setClassifer(string $classifier, array $classifierOptions = [])
{
if ($classifier != DecisionTree::class) {
if ($classifier !== DecisionTree::class) {
throw new InvalidArgumentException('RandomForest can only use DecisionTree as base classifier');
}
......@@ -100,10 +100,9 @@ class RandomForest extends Bagging
// Normalize & sort the importance values
$total = array_sum($sum);
foreach ($sum as &$importance) {
array_walk($sum, function (&$importance) use ($total): void {
$importance /= $total;
}
});
arsort($sum);
return $sum;
......@@ -131,7 +130,7 @@ class RandomForest extends Bagging
{
if (is_float($this->featureSubsetRatio)) {
$featureCount = (int) ($this->featureSubsetRatio * $this->featureCount);
} elseif ($this->featureSubsetRatio == 'sqrt') {
} elseif ($this->featureSubsetRatio === 'sqrt') {
$featureCount = (int) sqrt($this->featureCount) + 1;
} else {
$featureCount = (int) log($this->featureCount, 2) + 1;
......
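The comparison changes in this file (the ratio strings and the base-classifier check) swap loose != for strict !==. A short sketch of the loose-comparison surprises that strict comparison rules out; note that the string-to-number case shown here changed again in PHP 8:

var_dump('sqrt' == 0);      // bool(true) on PHP 7.x: the string is cast to int 0
var_dump('1e2' == '100');   // bool(true): two numeric strings are compared as numbers
var_dump('sqrt' === 0);     // bool(false) on every PHP version
var_dump('1e2' === '100');  // bool(false): identical type and value required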
......@@ -11,7 +11,8 @@ use Phpml\Math\Distance\Euclidean;
class KNearestNeighbors implements Classifier
{
use Trainable, Predictable;
use Trainable;
use Predictable;
/**
* @var int
......@@ -47,7 +48,7 @@ class KNearestNeighbors implements Classifier
$predictions = array_combine(array_values($this->targets), array_fill(0, count($this->targets), 0));
foreach ($distances as $index => $distance) {
foreach (array_keys($distances) as $index) {
++$predictions[$this->targets[$index]];
}
......
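When a loop body only needs the array key, iterating over array_keys() avoids declaring an unused value variable, which is what the rewritten vote-counting loop above does. A standalone sketch with made-up data:

$targets   = [0 => 'spam', 1 => 'ham', 2 => 'spam'];   // label of each training sample
$distances = [0 => 1.2, 2 => 0.4];                     // index => distance of a nearby sample

$votes = array_fill_keys(array_values($targets), 0);
foreach (array_keys($distances) as $index) {
    ++$votes[$targets[$index]];
}

// $votes === ['spam' => 2, 'ham' => 0]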
......@@ -13,7 +13,8 @@ use Phpml\Math\Comparison;
class DecisionStump extends WeightedClassifier
{
use Predictable, OneVsRest;
use Predictable;
use OneVsRest;
public const AUTO_SELECT = -1;
......
......@@ -87,10 +87,8 @@ class LogisticRegression extends Adaline
);
}
if ($penalty != '' && strtoupper($penalty) !== 'L2') {
throw new InvalidArgumentException(
"Logistic regression supports only 'L2' regularization"
);
if ($penalty !== '' && strtoupper($penalty) !== 'L2') {
throw new InvalidArgumentException('Logistic regression supports only \'L2\' regularization');
}
$this->learningRate = 0.001;
......@@ -174,7 +172,7 @@ class LogisticRegression extends Adaline
protected function getCostFunction(): Closure
{
$penalty = 0;
if ($this->penalty == 'L2') {
if ($this->penalty === 'L2') {
$penalty = $this->lambda;
}
......@@ -190,7 +188,7 @@ class LogisticRegression extends Adaline
* The gradient of the cost function to be used with gradient descent:
* ∇J(x) = -(y - h(x)) = (h(x) - y)
*/
$callback = function ($weights, $sample, $y) use ($penalty) {
return function ($weights, $sample, $y) use ($penalty) {
$this->weights = $weights;
$hX = $this->output($sample);
......@@ -211,9 +209,6 @@ class LogisticRegression extends Adaline
return [$error, $gradient, $penalty];
};
return $callback;
case 'sse':
/*
* Sum of squared errors or least squared errors cost function:
......@@ -225,7 +220,7 @@ class LogisticRegression extends Adaline
* The gradient of the cost function:
* ∇J(x) = -(h(x) - y) . h(x) . (1 - h(x))
*/
$callback = function ($weights, $sample, $y) use ($penalty) {
return function ($weights, $sample, $y) use ($penalty) {
$this->weights = $weights;
$hX = $this->output($sample);
......@@ -236,9 +231,6 @@ class LogisticRegression extends Adaline
return [$error, $gradient, $penalty];
};
return $callback;
default:
// Not reached
throw new Exception(sprintf('Logistic regression has invalid cost function: %s.', $this->costFunction));
......
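Both switch branches above previously stored the closure in $callback and returned it after the assignment; the rewrite returns the closure directly, the kind of change a "no assignment immediately before return" rule produces. A tiny sketch of the same pattern with a hypothetical factory function, not the real cost function:

// Before: assign, then return.
function makeScalerBefore(float $factor): Closure
{
    $callback = function (float $value) use ($factor): float {
        return $value * $factor;
    };

    return $callback;
}

// After: return the closure directly.
function makeScalerAfter(float $factor): Closure
{
    return function (float $value) use ($factor): float {
        return $value * $factor;
    };
}

$double = makeScalerAfter(2.0);
echo $double(21.0);   // 42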
......@@ -17,7 +17,8 @@ use Phpml\Preprocessing\Normalizer;
class Perceptron implements Classifier, IncrementalEstimator
{
use Predictable, OneVsRest;
use Predictable;
use OneVsRest;
/**
* @var Optimizer|GD|StochasticGD|null
......
......@@ -11,7 +11,8 @@ use Phpml\Math\Statistic\StandardDeviation;
class NaiveBayes implements Classifier
{
use Trainable, Predictable;
use Trainable;
use Predictable;
public const CONTINUOS = 1;
......
......@@ -23,7 +23,7 @@ class ArrayDataset implements Dataset
*/
public function __construct(array $samples, array $targets)
{
if (count($samples) != count($targets)) {
if (count($samples) !== count($targets)) {
throw new InvalidArgumentException('Size of given arrays does not match');
}
......
......@@ -94,7 +94,7 @@ class SvmDataset extends ArrayDataset
private static function parseFeatureColumn(string $column): array
{
$feature = explode(':', $column, 2);
if (count($feature) != 2) {
if (count($feature) !== 2) {
throw new DatasetException(sprintf('Invalid value "%s".', $column));
}
......
......@@ -120,7 +120,7 @@ class PCA extends EigenTransformerBase
}
// Normalize data
foreach ($data as $i => $row) {
foreach (array_keys($data) as $i) {
for ($k = 0; $k < $n; ++$k) {
$data[$i][$k] -= $this->means[$k];
}
......
......@@ -48,9 +48,9 @@ class TokenCountVectorizer implements Transformer
public function transform(array &$samples): void
{
foreach ($samples as &$sample) {
array_walk($samples, function (string &$sample): void {
$this->transformSample($sample);
}
});
$this->checkDocumentFrequency($samples);
}
......@@ -62,7 +62,7 @@ class TokenCountVectorizer implements Transformer
private function buildVocabulary(array &$samples): void
{
foreach ($samples as $index => $sample) {
foreach ($samples as $sample) {
$tokens = $this->tokenizer->tokenize($sample);
foreach ($tokens as $token) {
$this->addTokenToVocabulary($token);
......
......@@ -43,7 +43,7 @@ final class UnivariateLinearRegression implements ScoringFunction
}
$correlations = [];
foreach ($samples[0] as $index => $feature) {
foreach (array_keys($samples[0]) as $index) {
$featureColumn = array_column($samples, $index);
$correlations[$index] =
(Matrix::dot($targets, $featureColumn)[0] / (new Matrix($featureColumn, false))->transpose()->frobeniusNorm())
......@@ -57,15 +57,15 @@ final class UnivariateLinearRegression implements ScoringFunction
}, $correlations);
}
private function centerTargets(&$targets): void
private function centerTargets(array &$targets): void
{
$mean = Mean::arithmetic($targets);
foreach ($targets as &$target) {
array_walk($targets, function (&$target) use ($mean): void {
$target -= $mean;
}
});
}
private function centerSamples(&$samples): void
private function centerSamples(array &$samples): void
{
$means = [];
foreach ($samples[0] as $index => $feature) {
......
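centerTargets() and centerSamples() gain array type declarations on their by-reference parameters, and the mean-centering loop becomes an array_walk. A minimal standalone version of the same idea (the helper name is made up):

function centerInPlace(array &$values): void
{
    $mean = array_sum($values) / count($values);
    array_walk($values, function (&$value) use ($mean): void {
        $value -= $mean;
    });
}

$targets = [1.0, 2.0, 6.0];
centerInPlace($targets);

// $targets === [-2.0, -1.0, 3.0]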
......@@ -179,7 +179,7 @@ class EigenvalueDecomposition
continue;
}
$o = ($this->e[$i] > 0) ? $i + 1 : $i - 1;
$o = $this->e[$i] > 0 ? $i + 1 : $i - 1;
$D[$i][$o] = $this->e[$i];
}
......@@ -222,7 +222,7 @@ class EigenvalueDecomposition
}
$this->e[$i] = $scale * $g;
$h = $h - $f * $g;
$h -= $f * $g;
$this->d[$i_] = $f - $g;
for ($j = 0; $j < $i; ++$j) {
......@@ -395,7 +395,7 @@ class EigenvalueDecomposition
} while (abs($this->e[$l]) > $eps * $tst1);
}
$this->d[$l] = $this->d[$l] + $f;
$this->d[$l] += $f;
$this->e[$l] = 0.0;
}
......@@ -439,7 +439,7 @@ class EigenvalueDecomposition
// Scale column.
$scale = 0.0;
for ($i = $m; $i <= $high; ++$i) {
$scale = $scale + abs($this->H[$i][$m - 1]);
$scale += abs($this->H[$i][$m - 1]);
}
if ($scale != 0.0) {
......@@ -477,7 +477,7 @@ class EigenvalueDecomposition
$f += $this->ort[$j] * $this->H[$i][$j];
}
$f = $f / $h;
$f /= $h;
for ($j = $m; $j <= $high; ++$j) {
$this->H[$i][$j] -= $f * $this->ort[$j];
}
......@@ -568,7 +568,7 @@ class EigenvalueDecomposition
}
for ($j = max($i - 1, 0); $j < $nn; ++$j) {
$norm = $norm + abs($this->H[$i][$j]);
$norm += abs($this->H[$i][$j]);
}
}
......@@ -593,7 +593,7 @@ class EigenvalueDecomposition
// Check for convergence
// One root found
if ($l == $n) {
$this->H[$n][$n] = $this->H[$n][$n] + $exshift;
$this->H[$n][$n] += $exshift;
$this->d[$n] = $this->H[$n][$n];
$this->e[$n] = 0.0;
--$n;
......@@ -604,8 +604,8 @@ class EigenvalueDecomposition
$p = ($this->H[$n - 1][$n - 1] - $this->H[$n][$n]) / 2.0;
$q = $p * $p + $w;
$z = sqrt(abs($q));
$this->H[$n][$n] = $this->H[$n][$n] + $exshift;
$this->H[$n - 1][$n - 1] = $this->H[$n - 1][$n - 1] + $exshift;
$this->H[$n][$n] += $exshift;
$this->H[$n - 1][$n - 1] += $exshift;
$x = $this->H[$n][$n];
// Real pair
if ($q >= 0) {
......@@ -628,8 +628,8 @@ class EigenvalueDecomposition
$p = $x / $s;
$q = $z / $s;
$r = sqrt($p * $p + $q * $q);
$p = $p / $r;
$q = $q / $r;
$p /= $r;
$q /= $r;
// Row modification
for ($j = $n - 1; $j < $nn; ++$j) {
$z = $this->H[$n - 1][$j];
......@@ -659,7 +659,7 @@ class EigenvalueDecomposition
$this->e[$n] = -$z;
}
$n = $n - 2;
$n -= 2;
$iter = 0;
// No convergence yet
} else {
......@@ -687,7 +687,7 @@ class EigenvalueDecomposition
// MATLAB's new ad hoc shift
if ($iter == 30) {
$s = ($y - $x) / 2.0;
$s = $s * $s + $w;
$s *= $s + $w;
if ($s > 0) {
$s = sqrt($s);
if ($y < $x) {
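Most arithmetic rewrites in this class replace '$x = $x op expr;' with '$x op= expr;', which is equivalent because a compound assignment applies the operator between the variable and the whole right-hand side. The hunk above is the one place where that changes the result: the original '$s = $s * $s + $w;' computes ($s * $s) + $w, while '$s *= $s + $w;' multiplies by the whole sum. A two-variable check:

$s = 3.0;
$w = 5.0;
$old = $s * $s + $w;   // (3 * 3) + 5 = 14, the original expression
$s  *= $s + $w;        // 3 * (3 + 5) = 24, the rewritten expression
var_dump($old, $s);    // float(14) float(24)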
......@@ -705,7 +705,7 @@ class EigenvalueDecomposition
}
// Could check iteration count here.
$iter = $iter + 1;
++$iter;
// Look for two consecutive small sub-diagonal elements
$m = $n - 2;
while ($m >= $l) {
......@@ -716,9 +716,9 @@ class EigenvalueDecomposition
$q = $this->H[$m + 1][$m + 1] - $z - $r - $s;
$r = $this->H[$m + 2][$m + 1];
$s = abs($p) + abs($q) + abs($r);
$p = $p / $s;
$q = $q / $s;
$r = $r / $s;
$p /= $s;
$q /= $s;
$r /= $s;
if ($m == $l) {
break;
}
......@@ -747,9 +747,9 @@ class EigenvalueDecomposition
$r = ($notlast ? $this->H[$k + 2][$k - 1] : 0.0);
$x = abs($p) + abs($q) + abs($r);
if ($x != 0.0) {
$p = $p / $x;
$q = $q / $x;
$r = $r / $x;
$p /= $x;
$q /= $x;
$r /= $x;
}
}
......@@ -769,46 +769,46 @@ class EigenvalueDecomposition
$this->H[$k][$k - 1] = -$this->H[$k][$k - 1];
}
$p = $p + $s;
$p += $s;
$x = $p / $s;
$y = $q / $s;
$z = $r / $s;
$q = $q / $p;
$r = $r / $p;
$q /= $p;
$r /= $p;
// Row modification
for ($j = $k; $j < $nn; ++$j) {
$p = $this->H[$k][$j] + $q * $this->H[$k + 1][$j];
if ($notlast) {
$p = $p + $r * $this->H[$k + 2][$j];
$this->H[$k + 2][$j] = $this->H[$k + 2][$j] - $p * $z;
$p += $r * $this->H[$k + 2][$j];
$this->H[$k + 2][$j] -= $p * $z;
}
$this->H[$k][$j] = $this->H[$k][$j] - $p * $x;
$this->H[$k + 1][$j] = $this->H[$k + 1][$j] - $p * $y;
$this->H[$k][$j] -= $p * $x;
$this->H[$k + 1][$j] -= $p * $y;
}
// Column modification
for ($i = 0; $i <= min($n, $k + 3); ++$i) {
$p = $x * $this->H[$i][$k] + $y * $this->H[$i][$k + 1];
if ($notlast) {
$p = $p + $z * $this->H[$i][$k + 2];
$this->H[$i][$k + 2] = $this->H[$i][$k + 2] - $p * $r;
$p += $z * $this->H[$i][$k + 2];
$this->H[$i][$k + 2] -= $p * $r;
}
$this->H[$i][$k] = $this->H[$i][$k] - $p;
$this->H[$i][$k + 1] = $this->H[$i][$k + 1] - $p * $q;
$this->H[$i][$k] -= $p;
$this->H[$i][$k + 1] -= $p * $q;
}
// Accumulate transformations
for ($i = $low; $i <= $high; ++$i) {
$p = $x * $this->V[$i][$k] + $y * $this->V[$i][$k + 1];
if ($notlast) {
$p = $p + $z * $this->V[$i][$k + 2];
$this->V[$i][$k + 2] = $this->V[$i][$k + 2] - $p * $r;
$p += $z * $this->V[$i][$k + 2];
$this->V[$i][$k + 2] -= $p * $r;
}
$this->V[$i][$k] = $this->V[$i][$k] - $p;
$this->V[$i][$k + 1] = $this->V[$i][$k + 1] - $p * $q;
$this->V[$i][$k] -= $p;
$this->V[$i][$k + 1] -= $p * $q;
}
} // ($s != 0)
} // k loop
......@@ -831,7 +831,7 @@ class EigenvalueDecomposition
$w = $this->H[$i][$i] - $p;
$r = 0.0;
for ($j = $l; $j <= $n; ++$j) {
$r = $r + $this->H[$i][$j] * $this->H[$j][$n];
$r += $this->H[$i][$j] * $this->H[$j][$n];
}
if ($this->e[$i] < 0.0) {
......@@ -864,7 +864,7 @@ class EigenvalueDecomposition
$t = abs($this->H[$i][$n]);
if (($eps * $t) * $t > 1) {
for ($j = $i; $j <= $n; ++$j) {
$this->H[$j][$n] = $this->H[$j][$n] / $t;
$this->H[$j][$n] /= $t;
}
}
}
......@@ -890,8 +890,8 @@ class EigenvalueDecomposition
$ra = 0.0;
$sa = 0.0;
for ($j = $l; $j <= $n; ++$j) {
$ra = $ra + $this->H[$i][$j] * $this->H[$j][$n - 1];
$sa = $sa + $this->H[$i][$j] * $this->H[$j][$n];
$ra += $this->H[$i][$j] * $this->H[$j][$n - 1];
$sa += $this->H[$i][$j] * $this->H[$j][$n];
}
$w = $this->H[$i][$i] - $p;
......@@ -932,8 +932,8 @@ class EigenvalueDecomposition
$t = max(abs($this->H[$i][$n - 1]), abs($this->H[$i][$n]));
if (($eps * $t) * $t > 1) {
for ($j = $i; $j <= $n; ++$j) {
$this->H[$j][$n - 1] = $this->H[$j][$n - 1] / $t;
$this->H[$j][$n] = $this->H[$j][$n] / $t;
$this->H[$j][$n - 1] /= $t;
$this->H[$j][$n] /= $t;
}
}
} // end else
......@@ -955,7 +955,7 @@ class EigenvalueDecomposition
for ($i = $low; $i <= $high; ++$i) {
$z = 0.0;
for ($k = $low; $k <= min($j, $high); ++$k) {
$z = $z + $this->V[$i][$k] * $this->H[$k][$j];
$z += $this->V[$i][$k] * $this->H[$k][$j];
}
$this->V[$i][$j] = $z;
......
......@@ -133,7 +133,7 @@ class LUDecomposition
$k = $this->piv[$p];
$this->piv[$p] = $this->piv[$j];
$this->piv[$j] = $k;
$this->pivsign = $this->pivsign * -1;
$this->pivsign *= -1;
}
// Compute multipliers.
......
......@@ -261,7 +261,7 @@ class Matrix
$squareSum = 0;
for ($i = 0; $i < $this->rows; ++$i) {
for ($j = 0; $j < $this->columns; ++$j) {
$squareSum += ($this->matrix[$i][$j]) ** 2;
$squareSum += $this->matrix[$i][$j] ** 2;
}
}