1
1
using JetBrains . Annotations ;
2
2
using NeuralNetworkNET . APIs . Enums ;
3
- using NeuralNetworkNET . APIs . Interfaces ;
4
3
using NeuralNetworkNET . APIs . Structs ;
5
4
using NeuralNetworkNET . Networks . Activations ;
6
5
using NeuralNetworkNET . Networks . Cost ;
@@ -14,72 +13,67 @@ namespace NeuralNetworkNET.APIs
14
13
public static class NetworkLayers
15
14
{
16
15
/// <summary>
/// Creates a new fully connected layer with the specified number of output neurons, and the given activation function
/// </summary>
/// <param name="neurons">The number of output neurons</param>
/// <param name="activation">The desired activation function to use in the network layer</param>
/// <param name="weightsMode">The desired initialization mode for the weights in the network layer</param>
/// <param name="biasMode">The desired initialization mode to use for the layer bias values</param>
[PublicAPI]
[Pure, NotNull]
public static LayerFactory FullyConnected(
    int neurons, ActivationFunctionType activation,
    WeightsInitializationMode weightsMode = WeightsInitializationMode.GlorotUniform, BiasInitializationMode biasMode = BiasInitializationMode.Zero)
    => input => new FullyConnectedLayer(input, neurons, activation, weightsMode, biasMode); // input TensorInfo supplied later by the network builder
30
28
31
29
/// <summary>
/// Creates an output fully connected layer, with the specified cost function to use
/// </summary>
/// <param name="neurons">The number of output neurons</param>
/// <param name="activation">The desired activation function to use in the network layer</param>
/// <param name="cost">The cost function that should be used by the output layer</param>
/// <param name="weightsMode">The desired initialization mode for the weights in the network layer</param>
/// <param name="biasMode">The desired initialization mode to use for the layer bias values</param>
[PublicAPI]
[Pure, NotNull]
public static LayerFactory FullyConnected(
    int neurons, ActivationFunctionType activation, CostFunctionType cost,
    WeightsInitializationMode weightsMode = WeightsInitializationMode.GlorotUniform, BiasInitializationMode biasMode = BiasInitializationMode.Zero)
    => input => new OutputLayer(input, neurons, activation, cost, weightsMode, biasMode); // input TensorInfo supplied later by the network builder
46
43
47
44
/// <summary>
/// Creates a fully connected softmax output layer (used for classification problems with mutually-exclusive classes)
/// </summary>
/// <param name="outputs">The number of output neurons</param>
/// <param name="weightsMode">The desired initialization mode for the weights in the network layer</param>
/// <param name="biasMode">The desired initialization mode to use for the layer bias values</param>
[PublicAPI]
[Pure, NotNull]
public static LayerFactory Softmax(
    int outputs,
    WeightsInitializationMode weightsMode = WeightsInitializationMode.GlorotUniform, BiasInitializationMode biasMode = BiasInitializationMode.Zero)
    => input => new SoftmaxLayer(input, outputs, weightsMode, biasMode); // input TensorInfo supplied later by the network builder
60
56
61
57
/// <summary>
/// Creates a convolutional layer with the desired number of kernels
/// </summary>
/// <param name="kernel">The volume information of the kernels used in the layer</param>
/// <param name="kernels">The number of convolution kernels to apply to the input volume</param>
/// <param name="activation">The desired activation function to use in the network layer</param>
/// <param name="biasMode">Indicates the desired initialization mode to use for the layer bias values</param>
[PublicAPI]
[Pure, NotNull]
public static LayerFactory Convolutional(
    (int X, int Y) kernel, int kernels, ActivationFunctionType activation,
    BiasInitializationMode biasMode = BiasInitializationMode.Zero)
    => input => new ConvolutionalLayer(input, ConvolutionInfo.Default, kernel, kernels, activation, biasMode); // default convolution settings; input TensorInfo supplied later
75
70
76
71
/// <summary>
/// Creates a pooling layer with a window of size 2 and a stride of 2
/// </summary>
/// <param name="activation">The desired activation function to use in the network layer</param>
[PublicAPI]
[Pure, NotNull]
public static LayerFactory Pooling(ActivationFunctionType activation) => input => new PoolingLayer(input, PoolingInfo.Default, activation); // default 2x2 window, stride 2; input TensorInfo supplied later
84
78
}
85
79
}
0 commit comments