Commit 1f6e804

Tighter typehinting

1 parent 06ae7ee · commit 1f6e804

21 files changed, +67 -60 lines

CHANGELOG.md (3 additions, 0 deletions)

@@ -1,3 +1,6 @@
+- 1.3.2
+    - Optimize Binary output layer
+
 - 1.3.1
     - Update to Ok Bloomer 1.0 stable

src/NeuralNet/FeedForward.php (9 additions, 5 deletions)

@@ -38,7 +38,7 @@ class FeedForward implements Network
     /**
      * The hidden layers of the network.
      *
-     * @var \Rubix\ML\NeuralNet\Layers\Hidden[]
+     * @var list<\Rubix\ML\NeuralNet\Layers\Hidden>
      */
     protected array $hidden = [
         //
@@ -47,7 +47,7 @@ class FeedForward implements Network
     /**
      * The pathing of the backward pass through the hidden layers.
      *
-     * @var \Rubix\ML\NeuralNet\Layers\Hidden[]
+     * @var list<\Rubix\ML\NeuralNet\Layers\Hidden>
      */
     protected array $backPass = [
         //
@@ -75,11 +75,15 @@ class FeedForward implements Network
      */
     public function __construct(Input $input, array $hidden, Output $output, Optimizer $optimizer)
     {
+        $hidden = array_values($hidden);
+
+        $backPass = array_reverse($hidden);
+
         $this->input = $input;
         $this->hidden = $hidden;
         $this->output = $output;
         $this->optimizer = $optimizer;
-        $this->backPass = array_reverse($hidden);
+        $this->backPass = $backPass;
     }

     /**
@@ -95,7 +99,7 @@ public function input() : Input
     /**
      * Return an array of hidden layers indexed left to right.
      *
-     * @return \Rubix\ML\NeuralNet\Layers\Hidden[]
+     * @return list<\Rubix\ML\NeuralNet\Layers\Hidden>
      */
     public function hidden() : array
     {
@@ -131,7 +135,7 @@ public function layers() : Traversable
      */
     public function initialize() : void
     {
-        $fanIn = 0;
+        $fanIn = 1;

         foreach ($this->layers() as $layer) {
             $fanIn = $layer->initialize($fanIn);
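
Two things happen here. First, array_values() re-keys the constructor's
$hidden argument so the property provably satisfies the new
list<\Rubix\ML\NeuralNet\Layers\Hidden> annotation, and array_reverse()
of a list is itself a list, which covers $backPass. Second, initialize()
now seeds $fanIn at 1 instead of 0, matching the positive-int contract
introduced in the layer classes below. A minimal sketch of the re-keying
idea (the layer instances are illustrative, not taken from the commit):

<?php

use Rubix\ML\NeuralNet\Layers\Dense;
use Rubix\ML\NeuralNet\Layers\Activation;
use Rubix\ML\NeuralNet\ActivationFunctions\ReLU;

// An associative or gappy array is not a list<Hidden> to a static analyser.
$hidden = [
    'first' => new Dense(100),
    7 => new Activation(new ReLU()),
];

// array_values() reindexes with consecutive integer keys starting at 0 ...
$hidden = array_values($hidden);

// ... and array_reverse() (without preserve_keys) keeps it a list.
$backPass = array_reverse($hidden);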

src/NeuralNet/Layers/Activation.php (4 additions, 4 deletions)

@@ -30,7 +30,7 @@ class Activation implements Hidden
     /**
      * The width of the layer.
      *
-     * @var int<0,max>|null
+     * @var positive-int|null
      */
     protected ?int $width = null;

@@ -62,7 +62,7 @@ public function __construct(ActivationFunction $activationFn)
      * @internal
      *
      * @throws \Rubix\ML\Exceptions\RuntimeException
-     * @return int<0,max>
+     * @return positive-int
      */
     public function width() : int
     {
@@ -79,8 +79,8 @@ public function width() : int
      *
      * @internal
      *
-     * @param int<0,max> $fanIn
-     * @return int<0,max>
+     * @param positive-int $fanIn
+     * @return positive-int
      */
     public function initialize(int $fanIn) : int
     {
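
The same narrowing repeats through the rest of the commit: int<0,max>
(any non-negative integer) becomes positive-int (integers >= 1), since a
zero-width layer can never occur. A hedged sketch of the contract as
PHPStan/Psalm read it (the standalone function is illustrative, not the
library's code):

<?php

/**
 * @param positive-int $fanIn fan in from the previous layer, at least 1
 * @return positive-int fan out for this layer, also at least 1
 */
function initialize(int $fanIn) : int
{
    // An activation layer is exactly as wide as its input, so passing
    // $fanIn through preserves the positive-int guarantee downstream.
    return $fanIn;
}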

src/NeuralNet/Layers/BatchNorm.php (4 additions, 4 deletions)

@@ -57,7 +57,7 @@ class BatchNorm implements Hidden, Parametric
     /**
      * The width of the layer. i.e. the number of neurons.
      *
-     * @var int<0,max>|null
+     * @var positive-int|null
      */
    protected ?int $width = null;

@@ -130,7 +130,7 @@ public function __construct(
      * @internal
      *
      * @throws \Rubix\ML\Exceptions\RuntimeException
-     * @return int<0,max>
+     * @return positive-int
      */
     public function width() : int
     {
@@ -147,8 +147,8 @@ public function width() : int
      *
      * @internal
      *
-     * @param int<0,max> $fanIn
-     * @return int<0,max>
+     * @param positive-int $fanIn
+     * @return positive-int
      */
     public function initialize(int $fanIn) : int
     {

src/NeuralNet/Layers/Binary.php (5 additions, 5 deletions)

@@ -91,7 +91,7 @@ public function __construct(array $classes, ?ClassificationLoss $costFn = null)
     /**
      * Return the width of the layer.
      *
-     * @return int<0,max>
+     * @return positive-int
      */
     public function width() : int
     {
@@ -102,9 +102,9 @@ public function width() : int
      * Initialize the layer with the fan in from the previous layer and return
      * the fan out for this layer.
      *
-     * @param int<0,max> $fanIn
+     * @param positive-int $fanIn
      * @throws \Rubix\ML\Exceptions\InvalidArgumentException
-     * @return int<0,max>
+     * @return positive-int
      */
     public function initialize(int $fanIn) : int
     {
@@ -193,10 +193,10 @@ public function gradient(Matrix $input, Matrix $output, Matrix $expected) : Matr
                 ->divide($output->n());
         }

-        $dL = $this->costFn->differentiate($output, $expected)
+        $dLoss = $this->costFn->differentiate($output, $expected)
             ->divide($output->n());

         return $this->sigmoid->differentiate($input, $output)
-            ->multiply($dL);
+            ->multiply($dLoss);
     }
 }
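
The $dL to $dLoss rename is cosmetic, but the expression it names is the
chain rule at the output layer. In conventional notation (our reading,
not text from the commit), with pre-activations z, sigmoid predictions
\hat{y} = \sigma(z), and batch size n:

    \frac{\partial L}{\partial z} = \sigma'(z) \odot \frac{1}{n} \frac{\partial L}{\partial \hat{y}}

$dLoss holds the averaged loss derivative on the right, and
$this->sigmoid->differentiate() supplies \sigma'(z).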

src/NeuralNet/Layers/Continuous.php (3 additions, 3 deletions)

@@ -48,7 +48,7 @@ public function __construct(?RegressionLoss $costFn = null)
     /**
      * Return the width of the layer.
      *
-     * @return int<0,max>
+     * @return positive-int
      */
     public function width() : int
     {
@@ -59,9 +59,9 @@ public function width() : int
      * Initialize the layer with the fan in from the previous layer and return
      * the fan out for this layer.
      *
-     * @param int<0,max> $fanIn
+     * @param positive-int $fanIn
      * @throws \Rubix\ML\Exceptions\InvalidArgumentException
-     * @return int<0,max>
+     * @return positive-int
      */
     public function initialize(int $fanIn) : int
     {

src/NeuralNet/Layers/Dense.php (3 additions, 3 deletions)

@@ -121,7 +121,7 @@ public function __construct(
      *
      * @internal
      *
-     * @return int<0,max>
+     * @return positive-int
      */
     public function width() : int
     {
@@ -151,8 +151,8 @@ public function weights() : Matrix
      *
      * @internal
      *
-     * @param int<0,max> $fanIn
-     * @return int<0,max>
+     * @param positive-int $fanIn
+     * @return positive-int
      */
     public function initialize(int $fanIn) : int
     {

src/NeuralNet/Layers/Dropout.php (4 additions, 4 deletions)

@@ -43,7 +43,7 @@ class Dropout implements Hidden
     /**
      * The width of the layer.
      *
-     * @var int<0,max>|null
+     * @var positive-int|null
      */
     protected ?int $width = null;

@@ -75,7 +75,7 @@ public function __construct(float $ratio = 0.5)
      * @internal
      *
      * @throws \Rubix\ML\Exceptions\RuntimeException
-     * @return int<0,max>
+     * @return positive-int
      */
     public function width() : int
     {
@@ -92,8 +92,8 @@ public function width() : int
      *
      * @internal
      *
-     * @param int<0,max> $fanIn
-     * @return int<0,max>
+     * @param positive-int $fanIn
+     * @return positive-int
      */
     public function initialize(int $fanIn) : int
     {

src/NeuralNet/Layers/Layer.php (3 additions, 3 deletions)

@@ -11,7 +11,7 @@ interface Layer
      *
      * @internal
      *
-     * @return int<0,max>
+     * @return positive-int
      */
     public function width() : int;

@@ -21,8 +21,8 @@ public function width() : int;
      *
      * @internal
      *
-     * @param int $fanIn
-     * @return int
+     * @param positive-int $fanIn
+     * @return positive-int
      */
     public function initialize(int $fanIn) : int;

src/NeuralNet/Layers/Multiclass.php (6 additions, 6 deletions)

@@ -87,20 +87,20 @@ public function __construct(array $classes, ?ClassificationLoss $costFn = null)
     /**
      * Return the width of the layer.
      *
-     * @return int<0,max>
+     * @return positive-int
      */
     public function width() : int
     {
-        return count($this->classes);
+        return max(1, count($this->classes));
     }

     /**
      * Initialize the layer with the fan in from the previous layer and return
      * the fan out for this layer.
      *
-     * @param int<0,max> $fanIn
+     * @param positive-int $fanIn
      * @throws \Rubix\ML\Exceptions\InvalidArgumentException
-     * @return int<0,max>
+     * @return positive-int
      */
     public function initialize(int $fanIn) : int
     {
@@ -199,10 +199,10 @@ public function gradient(Matrix $input, Matrix $output, Matrix $expected) : Matr
                 ->divide($output->n());
         }

-        $dL = $this->costFn->differentiate($output, $expected)
+        $dLoss = $this->costFn->differentiate($output, $expected)
             ->divide($output->n());

         return $this->softmax->differentiate($input, $output)
-            ->multiply($dL);
+            ->multiply($dLoss);
     }
 }
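
The max(1, ...) wrapper in width() exists for the analyser, not the
runtime: count() infers as int<0,max>, which cannot satisfy a
positive-int return type on its own, while max(1, count(...)) narrows
the inferred type to int<1,max>. Assuming the constructor already
rejects an empty class list (not shown in this diff), the value itself
never changes. A sketch:

<?php

/** @var list<string> $classes */
$classes = ['cat', 'dog', 'frog'];

$width = count($classes);         // analyser type: int<0,max>

$width = max(1, count($classes)); // analyser type: int<1,max> = positive-int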
