Commit c058f14

Merge pull request #389 from apphp/388-convert-parameter-class-to-numpower

2 parents 6a09905 + c74c5fa

6 files changed: +330 -4 lines changed
Lines changed: 4 additions & 4 deletions

@@ -1,7 +1,7 @@
-<span style="float:right;"><a href="https://github.com/RubixML/ML/blob/master/src/NeuralNet/Optimizers/Stochastic.php">[source]</a></span>
+<span style="float:right;"><a href="https://github.com/RubixML/ML/blob/master/src/NeuralNet/Optimizers/Stochastic/Stochastic.php">[source]</a></span>

 # Stochastic
-A constant learning rate optimizer based on vanilla Stochastic Gradient Descent.
+A constant learning rate optimizer based on vanilla Stochastic Gradient Descent (SGD).

 ## Parameters
 | # | Name | Default | Type | Description |
@@ -10,7 +10,7 @@ A constant learning rate optimizer based on vanilla Stochastic Gradient Descent.

 ## Example
 ```php
-use Rubix\ML\NeuralNet\Optimizers\Stochastic;
+use Rubix\ML\NeuralNet\Optimizers\Stochastic\Stochastic;

 $optimizer = new Stochastic(0.01);
 ```
Lines changed: 29 additions & 0 deletions

@@ -0,0 +1,29 @@
<?php

namespace Rubix\ML\NeuralNet\Optimizers\Base;

use NDArray;
use Rubix\ML\NeuralNet\Parameters\Parameter;
use Stringable;

/**
 * Optimizer
 *
 * @category    Machine Learning
 * @package     Rubix/ML
 * @author      Andrew DalPino
 * @author      Samuel Akopyan <[email protected]>
 */
interface Optimizer extends Stringable
{
    /**
     * Take a step of gradient descent for a given parameter.
     *
     * @internal
     *
     * @param Parameter $param
     * @param NDArray $gradient
     * @return NDArray
     */
    public function step(Parameter $param, NDArray $gradient) : NDArray;
}
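For context, every optimizer in the new `Rubix\ML\NeuralNet\Optimizers` hierarchy implements this one-method contract. A minimal sketch of what a conforming implementation could look like (the `HalfStep` class and its 0.5 factor are illustrative only, not part of this commit):

```php
<?php

namespace Rubix\ML\NeuralNet\Optimizers\Base;

use NDArray;
use NumPower;
use Rubix\ML\NeuralNet\Parameters\Parameter;

// Hypothetical optimizer for illustration: applies half of every gradient.
class HalfStep implements Optimizer
{
    public function step(Parameter $param, NDArray $gradient) : NDArray
    {
        // The returned NDArray is the step that Parameter::update()
        // subtracts from the current parameter value.
        return NumPower::multiply($gradient, 0.5);
    }

    public function __toString() : string
    {
        return 'HalfStep';
    }
}
```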
Lines changed: 69 additions & 0 deletions

@@ -0,0 +1,69 @@
<?php

namespace Rubix\ML\NeuralNet\Optimizers\Stochastic;

use NDArray;
use NumPower;
use Rubix\ML\NeuralNet\Optimizers\Base\Optimizer;
use Rubix\ML\NeuralNet\Parameters\Parameter;
use Rubix\ML\Exceptions\InvalidArgumentException;

/**
 * Stochastic
 *
 * A vanilla Stochastic Gradient Descent (SGD) optimizer with a constant learning rate.
 *
 * @category    Machine Learning
 * @package     Rubix/ML
 * @author      Andrew DalPino
 * @author      Samuel Akopyan <[email protected]>
 */
class Stochastic implements Optimizer
{
    /**
     * The learning rate that controls the global step size.
     *
     * @var float
     */
    protected float $rate;

    /**
     * @param float $rate
     * @throws InvalidArgumentException
     */
    public function __construct(float $rate = 0.01)
    {
        if ($rate <= 0.0) {
            throw new InvalidArgumentException("Learning rate must be greater than 0, $rate given.");
        }

        $this->rate = $rate;
    }

    /**
     * Take a step of gradient descent for a given parameter.
     *
     * @internal
     *
     * @param Parameter $param
     * @param NDArray $gradient
     * @return NDArray
     */
    public function step(Parameter $param, NDArray $gradient) : NDArray
    {
        return NumPower::multiply($gradient, $this->rate);
    }

    /**
     * Return the string representation of the object.
     *
     * @internal
     *
     * @return string
     */
    public function __toString() : string
    {
        return "Stochastic (rate: {$this->rate})";
    }
}
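Note that `step()` only returns the gradient scaled by the learning rate; it is the caller (normally `Parameter::update()`, below) that applies it. A quick sketch reusing the values from the test data provider further down (`step()` is marked `@internal`, so this is for illustration rather than application code):

```php
use NumPower;
use Rubix\ML\NeuralNet\Optimizers\Stochastic\Stochastic;
use Rubix\ML\NeuralNet\Parameters\Parameter;

$param = new Parameter(NumPower::array([[0.1, 0.6, -0.4]]));

$gradient = NumPower::array([[0.01, 0.05, -0.02]]);

$optimizer = new Stochastic(0.001);

// rate * gradient = [[0.00001, 0.00005, -0.00002]]
$step = $optimizer->step($param, $gradient);
```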
Lines changed: 99 additions & 0 deletions

@@ -0,0 +1,99 @@
<?php

namespace Rubix\ML\NeuralNet\Parameters;

use NDArray;
use NumPower;
use Rubix\ML\NeuralNet\Optimizers\Base\Optimizer;

/**
 * Parameter
 *
 * A wrapper over an NDArray from NumPower that marks the parameter as trainable
 * and provides updates via the optimizer.
 *
 * @internal
 *
 * @category    Machine Learning
 * @package     Rubix/ML
 * @author      Andrew DalPino
 * @author      Samuel Akopyan <[email protected]>
 */
class Parameter
{
    /**
     * The auto incrementing id.
     *
     * @var int
     */
    protected static int $counter = 0;

    /**
     * The unique identifier of the parameter.
     *
     * @var int
     */
    protected int $id;

    /**
     * The parameter.
     *
     * @var NDArray
     */
    protected NDArray $param;

    /**
     * @param NDArray $param
     */
    public function __construct(NDArray $param)
    {
        $this->id = self::$counter++;
        $this->param = $param;
    }

    /**
     * Return the unique identifier of the parameter.
     *
     * @return int
     */
    public function id(): int
    {
        return $this->id;
    }

    /**
     * Return the wrapped parameter.
     *
     * @return NDArray
     */
    public function param(): NDArray
    {
        return $this->param;
    }

    /**
     * Update the parameter with the gradient and optimizer.
     *
     * @param NDArray $gradient
     * @param Optimizer $optimizer
     */
    public function update(NDArray $gradient, Optimizer $optimizer): void
    {
        $step = $optimizer->step($this, $gradient);

        $this->param = NumPower::subtract($this->param, $step);
    }

    /**
     * Perform a deep copy of the object upon cloning.
     */
    public function __clone(): void
    {
        $this->param = clone $this->param;
    }
}
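The full update cycle is therefore: the optimizer turns a gradient into a step, and the parameter replaces itself with `param - step`. A short sketch mirroring the values in `ParameterTest` below, using the default learning rate of 0.01:

```php
use NumPower;
use Rubix\ML\NeuralNet\Optimizers\Stochastic\Stochastic;
use Rubix\ML\NeuralNet\Parameters\Parameter;

$param = new Parameter(NumPower::array([
    [5, 4],
    [-2, 6],
]));

$gradient = NumPower::array([
    [2, 1],
    [1, -2],
]);

// step = 0.01 * gradient, so the parameter becomes
// [[4.98, 3.99], [-2.01, 6.02]]
$param->update($gradient, new Stochastic());
```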
Lines changed: 80 additions & 0 deletions

@@ -0,0 +1,80 @@
<?php

declare(strict_types=1);

namespace Rubix\ML\Tests\NeuralNet\Optimizers\Stochastic;

use Generator;
use NDArray;
use NumPower;
use PHPUnit\Framework\Attributes\CoversClass;
use PHPUnit\Framework\Attributes\DataProvider;
use PHPUnit\Framework\Attributes\Group;
use PHPUnit\Framework\Attributes\Test;
use PHPUnit\Framework\Attributes\TestDox;
use PHPUnit\Framework\TestCase;
use Rubix\ML\Exceptions\InvalidArgumentException;
use Rubix\ML\NeuralNet\Parameters\Parameter;
use Rubix\ML\NeuralNet\Optimizers\Stochastic\Stochastic;

#[Group('Optimizers')]
#[CoversClass(Stochastic::class)]
class StochasticTest extends TestCase
{
    protected Stochastic $optimizer;

    public static function stepProvider() : Generator
    {
        yield [
            new Parameter(NumPower::array([
                [0.1, 0.6, -0.4],
                [0.5, 0.6, -0.4],
                [0.1, 0.1, -0.7],
            ])),
            NumPower::array([
                [0.01, 0.05, -0.02],
                [-0.01, 0.02, 0.03],
                [0.04, -0.01, -0.5],
            ]),
            [
                [0.00001, 0.00005, -0.00002],
                [-0.00001, 0.00002, 0.00003],
                [0.00004, -0.00001, -0.0005],
            ],
        ];
    }

    protected function setUp() : void
    {
        $this->optimizer = new Stochastic(0.001);
    }

    #[Test]
    #[TestDox('Throws exception when constructed with invalid learning rate')]
    public function testConstructorWithInvalidRate() : void
    {
        $this->expectException(InvalidArgumentException::class);

        new Stochastic(0.0);
    }

    #[Test]
    #[TestDox('Can be cast to a string')]
    public function testToString() : void
    {
        self::assertEquals('Stochastic (rate: 0.001)', (string) $this->optimizer);
    }

    /**
     * @param Parameter $param
     * @param NDArray $gradient
     * @param list<list<float>> $expected
     */
    #[DataProvider('stepProvider')]
    public function testStep(Parameter $param, NDArray $gradient, array $expected) : void
    {
        $step = $this->optimizer->step(param: $param, gradient: $gradient);

        self::assertEqualsWithDelta($expected, $step->toArray(), 1e-7);
    }
}
Lines changed: 49 additions & 0 deletions

@@ -0,0 +1,49 @@
<?php

declare(strict_types=1);

namespace Rubix\ML\Tests\NeuralNet\Parameters;

use NumPower;
use PHPUnit\Framework\Attributes\CoversClass;
use PHPUnit\Framework\Attributes\Group;
use Rubix\ML\NeuralNet\Optimizers\Base\Optimizer;
use Rubix\ML\NeuralNet\Parameters\Parameter;
use Rubix\ML\NeuralNet\Optimizers\Stochastic\Stochastic;
use PHPUnit\Framework\TestCase;

#[Group('Parameters')]
#[CoversClass(Parameter::class)]
class ParameterTest extends TestCase
{
    protected Parameter $param;

    protected Optimizer $optimizer;

    protected function setUp() : void
    {
        $this->param = new Parameter(NumPower::array([
            [5, 4],
            [-2, 6],
        ]));

        $this->optimizer = new Stochastic();
    }

    public function testUpdate() : void
    {
        $gradient = NumPower::array([
            [2, 1],
            [1, -2],
        ]);

        $expected = [
            [4.98, 3.99],
            [-2.01, 6.02],
        ];

        $this->param->update(gradient: $gradient, optimizer: $this->optimizer);

        self::assertEqualsWithDelta($expected, $this->param->param()->toArray(), 1e-7);
    }
}
