1
+ using JetBrains . Annotations ;
2
+ using NeuralNetworkNET . APIs . Enums ;
3
+ using NeuralNetworkNET . APIs . Interfaces ;
4
+ using NeuralNetworkNET . APIs . Structs ;
5
+ using NeuralNetworkNET . Cuda . Layers ;
6
+ using NeuralNetworkNET . Networks . Activations ;
7
+
8
namespace NeuralNetworkNET.APIs
{
    /// <summary>
    /// A static class that exposes the available cuDNN network layer types
    /// </summary>
    /// <remarks>
    /// Each factory method returns an <see cref="INetworkLayer"/> backed by a cuDNN-accelerated
    /// implementation; the returned instances are interchangeable with their CPU counterparts.
    /// </remarks>
    public static class CuDnnNetworkLayers
    {
        /// <summary>
        /// Creates a new fully connected layer with the specified number of input and output neurons, and the given activation function
        /// </summary>
        /// <param name="input">The input <see cref="TensorInfo"/> descriptor</param>
        /// <param name="neurons">The number of output neurons</param>
        /// <param name="activation">The desired activation function to use in the network layer</param>
        /// <param name="weightsMode">The desired initialization mode for the weights in the network layer</param>
        /// <param name="biasMode">The desired initialization mode to use for the layer bias values</param>
        [PublicAPI]
        [Pure, NotNull]
        public static INetworkLayer FullyConnected(
            in TensorInfo input, int neurons, ActivationFunctionType activation,
            WeightsInitializationMode weightsMode = WeightsInitializationMode.GlorotUniform, BiasInitializationMode biasMode = BiasInitializationMode.Zero)
        {
            // Forward every argument straight to the cuDNN-backed implementation
            return new CuDnnFullyConnectedLayer(input, neurons, activation, weightsMode, biasMode);
        }

        /// <summary>
        /// Creates a fully connected softmax output layer (used for classification problems with mutually-exclusive classes)
        /// </summary>
        /// <param name="input">The input <see cref="TensorInfo"/> descriptor</param>
        /// <param name="outputs">The number of output neurons</param>
        /// <param name="weightsMode">The desired initialization mode for the weights in the network layer</param>
        /// <param name="biasMode">The desired initialization mode to use for the layer bias values</param>
        [PublicAPI]
        [Pure, NotNull]
        public static INetworkLayer Softmax(
            in TensorInfo input, int outputs,
            WeightsInitializationMode weightsMode = WeightsInitializationMode.GlorotUniform, BiasInitializationMode biasMode = BiasInitializationMode.Zero)
        {
            // The softmax layer has a fixed activation, so no ActivationFunctionType parameter is exposed
            return new CuDnnSoftmaxLayer(input, outputs, weightsMode, biasMode);
        }

        /// <summary>
        /// Creates a convolutional layer with the desired number of kernels
        /// </summary>
        /// <param name="input">The input volume to process</param>
        /// <param name="kernel">The volume information of the kernels used in the layer</param>
        /// <param name="kernels">The number of convolution kernels to apply to the input volume</param>
        /// <param name="activation">The desired activation function to use in the network layer</param>
        /// <param name="mode">The desired convolution mode to use</param>
        /// <param name="biasMode">Indicates the desired initialization mode to use for the layer bias values</param>
        [PublicAPI]
        [Pure, NotNull]
        public static INetworkLayer Convolutional(
            TensorInfo input, (int X, int Y) kernel, int kernels, ActivationFunctionType activation,
            ConvolutionMode mode = ConvolutionMode.Convolution,
            BiasInitializationMode biasMode = BiasInitializationMode.Zero)
        {
            // NOTE(review): unlike FullyConnected/Softmax, this method takes input by value (no `in` modifier)
            // in the original API surface, so that signature is preserved here
            return new CuDnnConvolutionalLayer(input, kernel, kernels, activation, mode, biasMode);
        }

        /// <summary>
        /// Creates a pooling layer with a window of size 2 and a stride of 2
        /// </summary>
        /// <param name="input">The input volume to pool</param>
        /// <param name="activation">The desired activation function to use in the network layer</param>
        [PublicAPI]
        [Pure, NotNull]
        public static INetworkLayer Pooling(TensorInfo input, ActivationFunctionType activation)
        {
            // Window size and stride are fixed by the CuDnnPoolingLayer implementation, not configurable here
            return new CuDnnPoolingLayer(input, activation);
        }
    }
}
0 commit comments