Unverified Commit f6aa1a59 authored by Arkadiusz Kondas's avatar Arkadiusz Kondas Committed by GitHub
Browse files

Remove phpunit readAttributes deprecated methods (#372)

parent db82afa2
......@@ -18,7 +18,8 @@ services:
PhpCsFixer\Fixer\Operator\BinaryOperatorSpacesFixer:
align_double_arrow: false
align_equals: false
PhpCsFixer\Fixer\PhpUnit\PhpUnitTestCaseStaticMethodCallsFixer:
call_type: 'self'
# phpdoc
PhpCsFixer\Fixer\Phpdoc\PhpdocSeparationFixer: ~
PhpCsFixer\Fixer\Phpdoc\PhpdocAlignFixer: ~
......
......@@ -48,6 +48,11 @@ abstract class Optimizer
return $this;
}
/**
 * Exposes the optimizer's current weight vector.
 */
public function theta(): array
{
    $weights = $this->theta;

    return $weights;
}
/**
* Executes the optimization with the given samples & targets
* and returns the weights
......
......@@ -129,6 +129,16 @@ abstract class MultilayerPerceptron extends LayeredNetwork implements Estimator,
return $result;
}
/**
 * Returns the learning rate currently configured on the network.
 */
public function getLearningRate(): float
{
    $rate = $this->learningRate;

    return $rate;
}
/**
 * Returns the Backpropagation instance used to train the network.
 */
public function getBackpropagation(): Backpropagation
{
    $trainer = $this->backpropagation;

    return $trainer;
}
/**
* @param mixed $target
*/
......
......@@ -34,6 +34,11 @@ class Backpropagation
$this->learningRate = $learningRate;
}
/**
 * Returns the learning rate applied by this backpropagation trainer.
 */
public function getLearningRate(): float
{
    $rate = $this->learningRate;

    return $rate;
}
/**
* @param mixed $targetClass
*/
......
......@@ -26,9 +26,7 @@ class OptimizerTest extends TestCase
$optimizer = $this->getMockForAbstractClass(Optimizer::class, [2]);
$object = $optimizer->setTheta([0.3, 1]);
$theta = self::getObjectAttribute($optimizer, 'theta');
self::assertSame($object, $optimizer);
self::assertSame([0.3, 1], $theta);
self::assertSame([0.3, 1], $object->theta());
}
}
......@@ -55,14 +55,12 @@ class MultilayerPerceptronTest extends TestCase
[5, [3], [0, 1], 1000, null, 0.42]
);
self::assertEquals(0.42, self::readAttribute($mlp, 'learningRate'));
$backprop = self::readAttribute($mlp, 'backpropagation');
self::assertEquals(0.42, self::readAttribute($backprop, 'learningRate'));
self::assertEquals(0.42, $mlp->getLearningRate());
self::assertEquals(0.42, $mlp->getBackpropagation()->getLearningRate());
$mlp->setLearningRate(0.24);
self::assertEquals(0.24, self::readAttribute($mlp, 'learningRate'));
$backprop = self::readAttribute($mlp, 'backpropagation');
self::assertEquals(0.24, self::readAttribute($backprop, 'learningRate'));
self::assertEquals(0.24, $mlp->getLearningRate());
self::assertEquals(0.24, $mlp->getBackpropagation()->getLearningRate());
}
public function testLearningRateSetterWithCustomActivationFunctions(): void
......@@ -75,14 +73,12 @@ class MultilayerPerceptronTest extends TestCase
[5, [[3, $activation_function], [5, $activation_function]], [0, 1], 1000, null, 0.42]
);
self::assertEquals(0.42, self::readAttribute($mlp, 'learningRate'));
$backprop = self::readAttribute($mlp, 'backpropagation');
self::assertEquals(0.42, self::readAttribute($backprop, 'learningRate'));
self::assertEquals(0.42, $mlp->getLearningRate());
self::assertEquals(0.42, $mlp->getBackpropagation()->getLearningRate());
$mlp->setLearningRate(0.24);
self::assertEquals(0.24, self::readAttribute($mlp, 'learningRate'));
$backprop = self::readAttribute($mlp, 'backpropagation');
self::assertEquals(0.24, self::readAttribute($backprop, 'learningRate'));
self::assertEquals(0.24, $mlp->getLearningRate());
self::assertEquals(0.24, $mlp->getBackpropagation()->getLearningRate());
}
public function testLearningRateSetterWithLayerObject(): void
......@@ -95,14 +91,12 @@ class MultilayerPerceptronTest extends TestCase
[5, [new Layer(3, Neuron::class, $activation_function), new Layer(5, Neuron::class, $activation_function)], [0, 1], 1000, null, 0.42]
);
self::assertEquals(0.42, self::readAttribute($mlp, 'learningRate'));
$backprop = self::readAttribute($mlp, 'backpropagation');
self::assertEquals(0.42, self::readAttribute($backprop, 'learningRate'));
self::assertEquals(0.42, $mlp->getLearningRate());
self::assertEquals(0.42, $mlp->getBackpropagation()->getLearningRate());
$mlp->setLearningRate(0.24);
self::assertEquals(0.24, self::readAttribute($mlp, 'learningRate'));
$backprop = self::readAttribute($mlp, 'backpropagation');
self::assertEquals(0.24, self::readAttribute($backprop, 'learningRate'));
self::assertEquals(0.24, $mlp->getLearningRate());
self::assertEquals(0.24, $mlp->getBackpropagation()->getLearningRate());
}
/**
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment