Skip to content

Commit 9e7176e

Browse files
committed
APIs improvements
1 parent bfa35a7 commit 9e7176e

File tree

8 files changed

+77
-90
lines changed

8 files changed

+77
-90
lines changed
Lines changed: 14 additions & 23 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,5 @@
11
using JetBrains.Annotations;
22
using NeuralNetworkNET.APIs.Enums;
3-
using NeuralNetworkNET.APIs.Interfaces;
43
using NeuralNetworkNET.APIs.Structs;
54
using NeuralNetworkNET.Cuda.Layers;
65
using NeuralNetworkNET.Networks.Activations;
@@ -13,72 +12,64 @@ namespace NeuralNetworkNET.APIs
1312
public static class CuDnnNetworkLayers
1413
{
1514
/// <summary>
16-
/// Creates a new fully connected layer with the specified number of input and output neurons, and the given activation function
15+
/// Creates a new fully connected layer with the specified number of output neurons, and the given activation function
1716
/// </summary>
18-
/// <param name="input">The input <see cref="TensorInfo"/> descriptor</param>
1917
/// <param name="neurons">The number of output neurons</param>
2018
/// <param name="activation">The desired activation function to use in the network layer</param>
2119
/// <param name="weightsMode">The desired initialization mode for the weights in the network layer</param>
2220
/// <param name="biasMode">The desired initialization mode to use for the layer bias values</param>
2321
[PublicAPI]
2422
[Pure, NotNull]
25-
public static INetworkLayer FullyConnected(
26-
in TensorInfo input, int neurons, ActivationFunctionType activation,
23+
public static LayerFactory FullyConnected(
24+
int neurons, ActivationFunctionType activation,
2725
WeightsInitializationMode weightsMode = WeightsInitializationMode.GlorotUniform, BiasInitializationMode biasMode = BiasInitializationMode.Zero)
28-
=> new CuDnnFullyConnectedLayer(input, neurons, activation, weightsMode, biasMode);
26+
=> input => new CuDnnFullyConnectedLayer(input, neurons, activation, weightsMode, biasMode);
2927

3028
/// <summary>
3129
/// Creates a fully connected softmax output layer (used for classification problems with mutually-exclusive classes)
3230
/// </summary>
33-
/// <param name="input">The input <see cref="TensorInfo"/> descriptor</param>
3431
/// <param name="outputs">The number of output neurons</param>
3532
/// <param name="weightsMode">The desired initialization mode for the weights in the network layer</param>
3633
/// <param name="biasMode">The desired initialization mode to use for the layer bias values</param>
3734
[PublicAPI]
3835
[Pure, NotNull]
39-
public static INetworkLayer Softmax(
40-
in TensorInfo input, int outputs,
36+
public static LayerFactory Softmax(
37+
int outputs,
4138
WeightsInitializationMode weightsMode = WeightsInitializationMode.GlorotUniform, BiasInitializationMode biasMode = BiasInitializationMode.Zero)
42-
=> new CuDnnSoftmaxLayer(input, outputs, weightsMode, biasMode);
39+
=> input => new CuDnnSoftmaxLayer(input, outputs, weightsMode, biasMode);
4340

4441
/// <summary>
4542
/// Creates a convolutional layer with the desired number of kernels
4643
/// </summary>
47-
/// <param name="input">The input volume to process</param>
4844
/// <param name="info">The info on the convolution operation to perform</param>
4945
/// <param name="kernel">The volume information of the kernels used in the layer</param>
5046
/// <param name="kernels">The number of convolution kernels to apply to the input volume</param>
5147
/// <param name="activation">The desired activation function to use in the network layer</param>
5248
/// <param name="biasMode">Indicates the desired initialization mode to use for the layer bias values</param>
5349
[PublicAPI]
5450
[Pure, NotNull]
55-
public static INetworkLayer Convolutional(
56-
in TensorInfo input,
57-
in ConvolutionInfo info, (int X, int Y) kernel, int kernels, ActivationFunctionType activation,
51+
public static LayerFactory Convolutional(
52+
ConvolutionInfo info, (int X, int Y) kernel, int kernels, ActivationFunctionType activation,
5853
BiasInitializationMode biasMode = BiasInitializationMode.Zero)
59-
=> new CuDnnConvolutionalLayer(input, info, kernel, kernels, activation, biasMode);
54+
=> input => new CuDnnConvolutionalLayer(input, info, kernel, kernels, activation, biasMode);
6055

6156
/// <summary>
6257
/// Creates a pooling layer with a window of size 2 and a stride of 2
6358
/// </summary>
64-
/// <param name="input">The input volume to pool</param>
6559
/// <param name="info">The info on the pooling operation to perform</param>
6660
/// <param name="activation">The desired activation function to use in the network layer</param>
6761
[PublicAPI]
6862
[Pure, NotNull]
69-
public static INetworkLayer Pooling(in TensorInfo input, in PoolingInfo info, ActivationFunctionType activation) => new CuDnnPoolingLayer(input, info, activation);
63+
public static LayerFactory Pooling(PoolingInfo info, ActivationFunctionType activation) => input => new CuDnnPoolingLayer(input, info, activation);
7064

7165
/// <summary>
72-
/// Creates a new inception layer with the given input and features
66+
/// Creates a new inception layer with the given features
7367
/// </summary>
74-
/// <param name="input">The input volume to process</param>
7568
/// <param name="info">The info on the operations to execute inside the layer</param>
7669
/// <param name="biasMode">Indicates the desired initialization mode to use for the layer bias values</param>
7770
[PublicAPI]
7871
[Pure, NotNull]
79-
public static INetworkLayer Inception(
80-
in TensorInfo input, in InceptionInfo info,
81-
BiasInitializationMode biasMode = BiasInitializationMode.Zero)
82-
=> new CuDnnInceptionLayer(input, info, biasMode);
72+
public static LayerFactory Inception(InceptionInfo info, BiasInitializationMode biasMode = BiasInitializationMode.Zero)
73+
=> input => new CuDnnInceptionLayer(input, info, biasMode);
8374
}
8475
}
Lines changed: 15 additions & 21 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,5 @@
11
using JetBrains.Annotations;
22
using NeuralNetworkNET.APIs.Enums;
3-
using NeuralNetworkNET.APIs.Interfaces;
43
using NeuralNetworkNET.APIs.Structs;
54
using NeuralNetworkNET.Networks.Activations;
65
using NeuralNetworkNET.Networks.Cost;
@@ -14,72 +13,67 @@ namespace NeuralNetworkNET.APIs
1413
public static class NetworkLayers
1514
{
1615
/// <summary>
17-
/// Creates a new fully connected layer with the specified number of input and output neurons, and the given activation function
16+
/// Creates a new fully connected layer with the specified number of output neurons, and the given activation function
1817
/// </summary>
19-
/// <param name="input">The input <see cref="TensorInfo"/> descriptor</param>
2018
/// <param name="neurons">The number of output neurons</param>
2119
/// <param name="activation">The desired activation function to use in the network layer</param>
2220
/// <param name="weightsMode">The desired initialization mode for the weights in the network layer</param>
2321
/// <param name="biasMode">The desired initialization mode to use for the layer bias values</param>
2422
[PublicAPI]
2523
[Pure, NotNull]
26-
public static INetworkLayer FullyConnected(
27-
in TensorInfo input, int neurons, ActivationFunctionType activation,
28-
WeightsInitializationMode weightsMode = WeightsInitializationMode.GlorotUniform, BiasInitializationMode biasMode = BiasInitializationMode.Zero)
29-
=> new FullyConnectedLayer(input, neurons, activation, weightsMode, biasMode);
24+
public static LayerFactory FullyConnected(
25+
int neurons, ActivationFunctionType activation,
26+
WeightsInitializationMode weightsMode = WeightsInitializationMode.GlorotUniform, BiasInitializationMode biasMode = BiasInitializationMode.Zero)
27+
=> input => new FullyConnectedLayer(input, neurons, activation, weightsMode, biasMode);
3028

3129
/// <summary>
3230
/// Creates an output fully connected layer, with the specified cost function to use
3331
/// </summary>
34-
/// <param name="input">The input <see cref="TensorInfo"/> descriptor</param>
3532
/// <param name="neurons">The number of output neurons</param>
3633
/// <param name="activation">The desired activation function to use in the network layer</param>
3734
/// <param name="cost">The cost function that should be used by the output layer</param>
3835
/// <param name="weightsMode">The desired initialization mode for the weights in the network layer</param>
3936
/// <param name="biasMode">The desired initialization mode to use for the layer bias values</param>
4037
[PublicAPI]
4138
[Pure, NotNull]
42-
public static INetworkLayer FullyConnected(
43-
in TensorInfo input, int neurons, ActivationFunctionType activation, CostFunctionType cost,
39+
public static LayerFactory FullyConnected(
40+
int neurons, ActivationFunctionType activation, CostFunctionType cost,
4441
WeightsInitializationMode weightsMode = WeightsInitializationMode.GlorotUniform, BiasInitializationMode biasMode = BiasInitializationMode.Zero)
45-
=> new OutputLayer(input, neurons, activation, cost, weightsMode, biasMode);
42+
=> input => new OutputLayer(input, neurons, activation, cost, weightsMode, biasMode);
4643

4744
/// <summary>
4845
/// Creates a fully connected softmax output layer (used for classification problems with mutually-exclusive classes)
4946
/// </summary>
50-
/// <param name="input">The input <see cref="TensorInfo"/> descriptor</param>
5147
/// <param name="outputs">The number of output neurons</param>
5248
/// <param name="weightsMode">The desired initialization mode for the weights in the network layer</param>
5349
/// <param name="biasMode">The desired initialization mode to use for the layer bias values</param>
5450
[PublicAPI]
5551
[Pure, NotNull]
56-
public static INetworkLayer Softmax(
57-
in TensorInfo input, int outputs,
52+
public static LayerFactory Softmax(
53+
int outputs,
5854
WeightsInitializationMode weightsMode = WeightsInitializationMode.GlorotUniform, BiasInitializationMode biasMode = BiasInitializationMode.Zero)
59-
=> new SoftmaxLayer(input, outputs, weightsMode, biasMode);
55+
=> input => new SoftmaxLayer(input, outputs, weightsMode, biasMode);
6056

6157
/// <summary>
6258
/// Creates a convolutional layer with the desired number of kernels
6359
/// </summary>
64-
/// <param name="input">The input <see cref="TensorInfo"/> descriptor</param>
6560
/// <param name="kernel">The volume information of the kernels used in the layer</param>
6661
/// <param name="kernels">The number of convolution kernels to apply to the input volume</param>
6762
/// <param name="activation">The desired activation function to use in the network layer</param>
6863
/// <param name="biasMode">Indicates the desired initialization mode to use for the layer bias values</param>
6964
[PublicAPI]
7065
[Pure, NotNull]
71-
public static INetworkLayer Convolutional(
72-
in TensorInfo input, (int X, int Y) kernel, int kernels, ActivationFunctionType activation,
66+
public static LayerFactory Convolutional(
67+
(int X, int Y) kernel, int kernels, ActivationFunctionType activation,
7368
BiasInitializationMode biasMode = BiasInitializationMode.Zero)
74-
=> new ConvolutionalLayer(input, ConvolutionInfo.Default, kernel, kernels, activation, biasMode);
69+
=> input => new ConvolutionalLayer(input, ConvolutionInfo.Default, kernel, kernels, activation, biasMode);
7570

7671
/// <summary>
7772
/// Creates a pooling layer with a window of size 2 and a stride of 2
7873
/// </summary>
79-
/// <param name="input">The input <see cref="TensorInfo"/> descriptor</param>
8074
/// <param name="activation">The desired activation function to use in the network layer</param>
8175
[PublicAPI]
8276
[Pure, NotNull]
83-
public static INetworkLayer Pooling(in TensorInfo input, ActivationFunctionType activation) => new PoolingLayer(input, PoolingInfo.Default, activation);
77+
public static LayerFactory Pooling(ActivationFunctionType activation) => input => new PoolingLayer(input, PoolingInfo.Default, activation);
8478
}
8579
}

NeuralNetwork.NET/APIs/NetworkManager.cs

Lines changed: 13 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -15,31 +15,33 @@
1515

1616
namespace NeuralNetworkNET.APIs
1717
{
18+
/// <summary>
19+
/// A <see langword="delegate"/> that represents a factory that produces instances of a specific layer type, with user-defined parameters.
20+
/// This wrapper acts as an intermediary to streamline the user-side C# syntax when building up a new network structure, as all the input
21+
/// details for each layer will be automatically computed during the network setup.
22+
/// </summary>
23+
/// <param name="info">The tensor info for the inputs of the upcoming network layer</param>
24+
/// <remarks>It is also possible to invoke a <see cref="LayerFactory"/> instance just like any other <see langword="delegate"/> to immediately get an <see cref="INetworkLayer"/> value</remarks>
25+
[NotNull]
26+
public delegate INetworkLayer LayerFactory(TensorInfo info);
27+
1828
/// <summary>
1929
/// A static class that creates and trains a neural network for the input data and expected results
2030
/// </summary>
2131
public static class NetworkManager
2232
{
2333
/// <summary>
24-
/// Creates a new network with the specified parameters
25-
/// </summary>
26-
/// <param name="layers">The network layers to use</param>
27-
[PublicAPI]
28-
[Pure, NotNull]
29-
public static INeuralNetwork NewNetwork([NotNull, ItemNotNull] params INetworkLayer[] layers) => new NeuralNetwork(layers);
30-
31-
/// <summary>
32-
/// Creates a new network with the specified parameters
34+
/// Creates a new network with a linear structure and the specified parameters
3335
/// </summary>
3436
/// <param name="input">The input <see cref="TensorInfo"/> description</param>
3537
/// <param name="factories">A list of factories to create the different layers in the new network</param>
3638
[PublicAPI]
3739
[Pure, NotNull]
38-
public static INeuralNetwork NewNetwork(TensorInfo input, [NotNull, ItemNotNull] params Func<TensorInfo, INetworkLayer>[] factories)
40+
public static INeuralNetwork NewSequential(TensorInfo input, [NotNull, ItemNotNull] params LayerFactory[] factories)
3941
{
4042
IEnumerable<INetworkLayer> BuildLayers()
4143
{
42-
foreach (Func<TensorInfo, INetworkLayer> f in factories)
44+
foreach (LayerFactory f in factories)
4345
{
4446
INetworkLayer layer = f(input);
4547
yield return layer;

Samples/DigitsCudaTest/Program.cs

Lines changed: 10 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -22,16 +22,16 @@ static async Task Main()
2222
{
2323
// Parse the dataset and create the network
2424
(var training, var test) = DataParser.LoadDatasets();
25-
INeuralNetwork network = NetworkManager.NewNetwork(TensorInfo.CreateForGrayscaleImage(28, 28),
26-
t => CuDnnNetworkLayers.Convolutional(t, ConvolutionInfo.Default, (5, 5), 20, ActivationFunctionType.LeakyReLU),
27-
t => CuDnnNetworkLayers.Convolutional(t, ConvolutionInfo.Default, (5, 5), 20, ActivationFunctionType.Identity),
28-
t => CuDnnNetworkLayers.Pooling(t, PoolingInfo.Default, ActivationFunctionType.LeakyReLU),
29-
t => CuDnnNetworkLayers.Convolutional(t, ConvolutionInfo.Default, (3, 3), 40, ActivationFunctionType.LeakyReLU),
30-
t => CuDnnNetworkLayers.Convolutional(t, ConvolutionInfo.Default, (3, 3), 40, ActivationFunctionType.Identity),
31-
t => CuDnnNetworkLayers.Pooling(t, PoolingInfo.Default, ActivationFunctionType.LeakyReLU),
32-
t => CuDnnNetworkLayers.FullyConnected(t, 125, ActivationFunctionType.LeCunTanh),
33-
t => CuDnnNetworkLayers.FullyConnected(t, 64, ActivationFunctionType.LeCunTanh),
34-
t => CuDnnNetworkLayers.Softmax(t, 10));
25+
INeuralNetwork network = NetworkManager.NewSequential(TensorInfo.CreateForGrayscaleImage(28, 28),
26+
CuDnnNetworkLayers.Convolutional(ConvolutionInfo.Default, (5, 5), 20, ActivationFunctionType.LeakyReLU),
27+
CuDnnNetworkLayers.Convolutional(ConvolutionInfo.Default, (5, 5), 20, ActivationFunctionType.Identity),
28+
CuDnnNetworkLayers.Pooling(PoolingInfo.Default, ActivationFunctionType.LeakyReLU),
29+
CuDnnNetworkLayers.Convolutional(ConvolutionInfo.Default, (3, 3), 40, ActivationFunctionType.LeakyReLU),
30+
CuDnnNetworkLayers.Convolutional(ConvolutionInfo.Default, (3, 3), 40, ActivationFunctionType.Identity),
31+
CuDnnNetworkLayers.Pooling(PoolingInfo.Default, ActivationFunctionType.LeakyReLU),
32+
CuDnnNetworkLayers.FullyConnected(125, ActivationFunctionType.LeCunTanh),
33+
CuDnnNetworkLayers.FullyConnected(64, ActivationFunctionType.LeCunTanh),
34+
CuDnnNetworkLayers.Softmax(10));
3535

3636
// Setup and start the training
3737
CancellationTokenSource cts = new CancellationTokenSource();

Samples/DigitsTest/Program.cs

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -17,9 +17,9 @@ class Program
1717
static async Task Main()
1818
{
1919
(var training, var test) = DataParser.LoadDatasets();
20-
INeuralNetwork network = NetworkManager.NewNetwork(TensorInfo.CreateForGrayscaleImage(28, 28),
21-
t => NetworkLayers.FullyConnected(t, 100, ActivationFunctionType.Sigmoid),
22-
t => NetworkLayers.FullyConnected(t, 10, ActivationFunctionType.Sigmoid, CostFunctionType.CrossEntropy));
20+
INeuralNetwork network = NetworkManager.NewSequential(TensorInfo.CreateForGrayscaleImage(28, 28),
21+
NetworkLayers.FullyConnected(100, ActivationFunctionType.Sigmoid),
22+
NetworkLayers.FullyConnected(10, ActivationFunctionType.Sigmoid, CostFunctionType.CrossEntropy));
2323
TrainingSessionResult result = await NetworkManager.TrainNetworkAsync(network, (training.X, training.Y), 60, 10,
2424
TrainingAlgorithmsInfo.CreateForStochasticGradientDescent(), 0.5f,
2525
testParameters: new TestParameters(test, new Progress<BackpropagationProgressEventArgs>(p =>

0 commit comments

Comments
 (0)