Skip to content

Commit e6889d1

Browse files
committed
Merge branch 'dev' into feature_unified-library
2 parents d983f36 + b786ab5 commit e6889d1

File tree

10 files changed

+86
-90
lines changed

10 files changed

+86
-90
lines changed

NeuralNetwork.NET/APIs/CuDnnNetworkLayers.cs

Lines changed: 13 additions & 22 deletions
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,6 @@
22
using System.Linq;
33
using JetBrains.Annotations;
44
using NeuralNetworkNET.APIs.Enums;
5-
using NeuralNetworkNET.APIs.Interfaces;
65
using NeuralNetworkNET.APIs.Structs;
76
using NeuralNetworkNET.Cuda.Layers;
87
using NeuralNetworkNET.Extensions;
@@ -39,71 +38,63 @@ public static bool IsCudaSupportAvailable
3938
/// <summary>
4039
/// Creates a new fully connected layer with the specified number of input and output neurons, and the given activation function
4140
/// </summary>
42-
/// <param name="input">The input <see cref="TensorInfo"/> descriptor</param>
4341
/// <param name="neurons">The number of output neurons</param>
4442
/// <param name="activation">The desired activation function to use in the network layer</param>
4543
/// <param name="weightsMode">The desired initialization mode for the weights in the network layer</param>
4644
/// <param name="biasMode">The desired initialization mode to use for the layer bias values</param>
4745
[PublicAPI]
4846
[Pure, NotNull]
49-
public static INetworkLayer FullyConnected(
50-
in TensorInfo input, int neurons, ActivationFunctionType activation,
47+
public static LayerFactory FullyConnected(
48+
int neurons, ActivationFunctionType activation,
5149
WeightsInitializationMode weightsMode = WeightsInitializationMode.GlorotUniform, BiasInitializationMode biasMode = BiasInitializationMode.Zero)
52-
=> new CuDnnFullyConnectedLayer(input, neurons, activation, weightsMode, biasMode);
50+
=> input => new CuDnnFullyConnectedLayer(input, neurons, activation, weightsMode, biasMode);
5351

5452
/// <summary>
5553
/// Creates a fully connected softmax output layer (used for classification problems with mutually-exclusive classes)
5654
/// </summary>
57-
/// <param name="input">The input <see cref="TensorInfo"/> descriptor</param>
5855
/// <param name="outputs">The number of output neurons</param>
5956
/// <param name="weightsMode">The desired initialization mode for the weights in the network layer</param>
6057
/// <param name="biasMode">The desired initialization mode to use for the layer bias values</param>
6158
[PublicAPI]
6259
[Pure, NotNull]
63-
public static INetworkLayer Softmax(
64-
in TensorInfo input, int outputs,
60+
public static LayerFactory Softmax(
61+
int outputs,
6562
WeightsInitializationMode weightsMode = WeightsInitializationMode.GlorotUniform, BiasInitializationMode biasMode = BiasInitializationMode.Zero)
66-
=> new CuDnnSoftmaxLayer(input, outputs, weightsMode, biasMode);
63+
=> input => new CuDnnSoftmaxLayer(input, outputs, weightsMode, biasMode);
6764

6865
/// <summary>
6966
/// Creates a convolutional layer with the desired number of kernels
7067
/// </summary>
71-
/// <param name="input">The input volume to process</param>
7268
/// <param name="info">The info on the convolution operation to perform</param>
7369
/// <param name="kernel">The volume information of the kernels used in the layer</param>
7470
/// <param name="kernels">The number of convolution kernels to apply to the input volume</param>
7571
/// <param name="activation">The desired activation function to use in the network layer</param>
7672
/// <param name="biasMode">Indicates the desired initialization mode to use for the layer bias values</param>
7773
[PublicAPI]
7874
[Pure, NotNull]
79-
public static INetworkLayer Convolutional(
80-
in TensorInfo input,
81-
in ConvolutionInfo info, (int X, int Y) kernel, int kernels, ActivationFunctionType activation,
75+
public static LayerFactory Convolutional(
76+
ConvolutionInfo info, (int X, int Y) kernel, int kernels, ActivationFunctionType activation,
8277
BiasInitializationMode biasMode = BiasInitializationMode.Zero)
83-
=> new CuDnnConvolutionalLayer(input, info, kernel, kernels, activation, biasMode);
78+
=> input => new CuDnnConvolutionalLayer(input, info, kernel, kernels, activation, biasMode);
8479

8580
/// <summary>
8681
/// Creates a pooling layer with a window of size 2 and a stride of 2
8782
/// </summary>
88-
/// <param name="input">The input volume to pool</param>
8983
/// <param name="info">The info on the pooling operation to perform</param>
9084
/// <param name="activation">The desired activation function to use in the network layer</param>
9185
[PublicAPI]
9286
[Pure, NotNull]
93-
public static INetworkLayer Pooling(in TensorInfo input, in PoolingInfo info, ActivationFunctionType activation) => new CuDnnPoolingLayer(input, info, activation);
87+
public static LayerFactory Pooling(PoolingInfo info, ActivationFunctionType activation) => input => new CuDnnPoolingLayer(input, info, activation);
9488

9589
/// <summary>
96-
/// Creates a new inception layer with the given input and features
90+
/// Creates a new inception layer with the given features
9791
/// </summary>
98-
/// <param name="input">The input volume to process</param>
9992
/// <param name="info">The info on the operations to execute inside the layer</param>
10093
/// <param name="biasMode">Indicates the desired initialization mode to use for the layer bias values</param>
10194
[PublicAPI]
10295
[Pure, NotNull]
103-
public static INetworkLayer Inception(
104-
in TensorInfo input, in InceptionInfo info,
105-
BiasInitializationMode biasMode = BiasInitializationMode.Zero)
106-
=> new CuDnnInceptionLayer(input, info, biasMode);
96+
public static LayerFactory Inception(InceptionInfo info, BiasInitializationMode biasMode = BiasInitializationMode.Zero)
97+
=> input => new CuDnnInceptionLayer(input, info, biasMode);
10798

10899
#region Feature helper
109100

NeuralNetwork.NET/APIs/Interfaces/INeuralNetwork.cs

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -29,6 +29,11 @@ public interface INeuralNetwork : IEquatable<INeuralNetwork>, IClonable<INeuralN
2929
[NotNull, ItemNotNull]
3030
IReadOnlyList<INetworkLayer> Layers { get; }
3131

32+
/// <summary>
33+
/// Gets the total number of parameters in the current network
34+
/// </summary>
35+
int Parameters { get; }
36+
3237
#endregion
3338

3439
#region Methods
Lines changed: 15 additions & 21 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,5 @@
11
using JetBrains.Annotations;
22
using NeuralNetworkNET.APIs.Enums;
3-
using NeuralNetworkNET.APIs.Interfaces;
43
using NeuralNetworkNET.APIs.Structs;
54
using NeuralNetworkNET.Networks.Activations;
65
using NeuralNetworkNET.Networks.Cost;
@@ -14,72 +13,67 @@ namespace NeuralNetworkNET.APIs
1413
public static class NetworkLayers
1514
{
1615
/// <summary>
17-
/// Creates a new fully connected layer with the specified number of input and output neurons, and the given activation function
16+
/// Creates a new fully connected layer with the specified number of output neurons, and the given activation function
1817
/// </summary>
19-
/// <param name="input">The input <see cref="TensorInfo"/> descriptor</param>
2018
/// <param name="neurons">The number of output neurons</param>
2119
/// <param name="activation">The desired activation function to use in the network layer</param>
2220
/// <param name="weightsMode">The desired initialization mode for the weights in the network layer</param>
2321
/// <param name="biasMode">The desired initialization mode to use for the layer bias values</param>
2422
[PublicAPI]
2523
[Pure, NotNull]
26-
public static INetworkLayer FullyConnected(
27-
in TensorInfo input, int neurons, ActivationFunctionType activation,
28-
WeightsInitializationMode weightsMode = WeightsInitializationMode.GlorotUniform, BiasInitializationMode biasMode = BiasInitializationMode.Zero)
29-
=> new FullyConnectedLayer(input, neurons, activation, weightsMode, biasMode);
24+
public static LayerFactory FullyConnected(
25+
int neurons, ActivationFunctionType activation,
26+
WeightsInitializationMode weightsMode = WeightsInitializationMode.GlorotUniform, BiasInitializationMode biasMode = BiasInitializationMode.Zero)
27+
=> input => new FullyConnectedLayer(input, neurons, activation, weightsMode, biasMode);
3028

3129
/// <summary>
3230
/// Creates an output fully connected layer, with the specified cost function to use
3331
/// </summary>
34-
/// <param name="input">The input <see cref="TensorInfo"/> descriptor</param>
3532
/// <param name="neurons">The number of output neurons</param>
3633
/// <param name="activation">The desired activation function to use in the network layer</param>
3734
/// <param name="cost">The cost function that should be used by the output layer</param>
3835
/// <param name="weightsMode">The desired initialization mode for the weights in the network layer</param>
3936
/// <param name="biasMode">The desired initialization mode to use for the layer bias values</param>
4037
[PublicAPI]
4138
[Pure, NotNull]
42-
public static INetworkLayer FullyConnected(
43-
in TensorInfo input, int neurons, ActivationFunctionType activation, CostFunctionType cost,
39+
public static LayerFactory FullyConnected(
40+
int neurons, ActivationFunctionType activation, CostFunctionType cost,
4441
WeightsInitializationMode weightsMode = WeightsInitializationMode.GlorotUniform, BiasInitializationMode biasMode = BiasInitializationMode.Zero)
45-
=> new OutputLayer(input, neurons, activation, cost, weightsMode, biasMode);
42+
=> input => new OutputLayer(input, neurons, activation, cost, weightsMode, biasMode);
4643

4744
/// <summary>
4845
/// Creates a fully connected softmax output layer (used for classification problems with mutually-exclusive classes)
4946
/// </summary>
50-
/// <param name="input">The input <see cref="TensorInfo"/> descriptor</param>
5147
/// <param name="outputs">The number of output neurons</param>
5248
/// <param name="weightsMode">The desired initialization mode for the weights in the network layer</param>
5349
/// <param name="biasMode">The desired initialization mode to use for the layer bias values</param>
5450
[PublicAPI]
5551
[Pure, NotNull]
56-
public static INetworkLayer Softmax(
57-
in TensorInfo input, int outputs,
52+
public static LayerFactory Softmax(
53+
int outputs,
5854
WeightsInitializationMode weightsMode = WeightsInitializationMode.GlorotUniform, BiasInitializationMode biasMode = BiasInitializationMode.Zero)
59-
=> new SoftmaxLayer(input, outputs, weightsMode, biasMode);
55+
=> input => new SoftmaxLayer(input, outputs, weightsMode, biasMode);
6056

6157
/// <summary>
6258
/// Creates a convolutional layer with the desired number of kernels
6359
/// </summary>
64-
/// <param name="input">The input <see cref="TensorInfo"/> descriptor</param>
6560
/// <param name="kernel">The volume information of the kernels used in the layer</param>
6661
/// <param name="kernels">The number of convolution kernels to apply to the input volume</param>
6762
/// <param name="activation">The desired activation function to use in the network layer</param>
6863
/// <param name="biasMode">Indicates the desired initialization mode to use for the layer bias values</param>
6964
[PublicAPI]
7065
[Pure, NotNull]
71-
public static INetworkLayer Convolutional(
72-
in TensorInfo input, (int X, int Y) kernel, int kernels, ActivationFunctionType activation,
66+
public static LayerFactory Convolutional(
67+
(int X, int Y) kernel, int kernels, ActivationFunctionType activation,
7368
BiasInitializationMode biasMode = BiasInitializationMode.Zero)
74-
=> new ConvolutionalLayer(input, ConvolutionInfo.Default, kernel, kernels, activation, biasMode);
69+
=> input => new ConvolutionalLayer(input, ConvolutionInfo.Default, kernel, kernels, activation, biasMode);
7570

7671
/// <summary>
7772
/// Creates a pooling layer with a window of size 2 and a stride of 2
7873
/// </summary>
79-
/// <param name="input">The input <see cref="TensorInfo"/> descriptor</param>
8074
/// <param name="activation">The desired activation function to use in the network layer</param>
8175
[PublicAPI]
8276
[Pure, NotNull]
83-
public static INetworkLayer Pooling(in TensorInfo input, ActivationFunctionType activation) => new PoolingLayer(input, PoolingInfo.Default, activation);
77+
public static LayerFactory Pooling(ActivationFunctionType activation) => input => new PoolingLayer(input, PoolingInfo.Default, activation);
8478
}
8579
}

NeuralNetwork.NET/APIs/NetworkManager.cs

Lines changed: 13 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -15,31 +15,33 @@
1515

1616
namespace NeuralNetworkNET.APIs
1717
{
18+
/// <summary>
19+
/// A <see cref="delegate"/> that represents a factory that produces instances of a specific layer type, with user-defined parameters.
20+
This wrapper acts as an intermediary to streamline the user-side C# syntax when building up a new network structure, as all the input
21+
/// details for each layer will be automatically computed during the network setup.
22+
/// </summary>
23+
/// <param name="info">The tensor info for the inputs of the upcoming network layer</param>
24+
/// <remarks>It is also possible to invoke a <see cref="LayerFactory"/> instance just like any other <see cref="delegate"/> to immediately get an <see cref="INetworkLayer"/> value</remarks>
25+
[NotNull]
26+
public delegate INetworkLayer LayerFactory(TensorInfo info);
27+
1828
/// <summary>
1929
/// A static class that create and trains a neural network for the input data and expected results
2030
/// </summary>
2131
public static class NetworkManager
2232
{
2333
/// <summary>
24-
/// Creates a new network with the specified parameters
25-
/// </summary>
26-
/// <param name="layers">The network layers to use</param>
27-
[PublicAPI]
28-
[Pure, NotNull]
29-
public static INeuralNetwork NewNetwork([NotNull, ItemNotNull] params INetworkLayer[] layers) => new NeuralNetwork(layers);
30-
31-
/// <summary>
32-
/// Creates a new network with the specified parameters
34+
/// Creates a new network with a linear structure and the specified parameters
3335
/// </summary>
3436
/// <param name="input">The input <see cref="TensorInfo"/> description</param>
3537
/// <param name="factories">A list of factories to create the different layers in the new network</param>
3638
[PublicAPI]
3739
[Pure, NotNull]
38-
public static INeuralNetwork NewNetwork(TensorInfo input, [NotNull, ItemNotNull] params Func<TensorInfo, INetworkLayer>[] factories)
40+
public static INeuralNetwork NewSequential(TensorInfo input, [NotNull, ItemNotNull] params LayerFactory[] factories)
3941
{
4042
IEnumerable<INetworkLayer> BuildLayers()
4143
{
42-
foreach (Func<TensorInfo, INetworkLayer> f in factories)
44+
foreach (LayerFactory f in factories)
4345
{
4446
INetworkLayer layer = f(input);
4547
yield return layer;

NeuralNetwork.NET/Networks/Implementations/NeuralNetwork.cs

Lines changed: 5 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -40,13 +40,17 @@ public sealed class NeuralNetwork : INeuralNetwork
4040
/// <inheritdoc/>
4141
public IReadOnlyList<INetworkLayer> Layers => _Layers;
4242

43+
/// <inheritdoc/>
44+
[JsonProperty(nameof(Parameters), Order = 3)]
45+
public int Parameters => Layers.Sum(l => l is WeightedLayerBase weighted ? weighted.Weights.Length + weighted.Biases.Length : 0);
46+
4347
#endregion
4448

4549
/// <summary>
4650
/// The list of layers that make up the neural network
4751
/// </summary>
4852
[NotNull, ItemNotNull]
49-
[JsonProperty(nameof(Layers), Order = 3)]
53+
[JsonProperty(nameof(Layers), Order = 4)]
5054
internal readonly NetworkLayerBase[] _Layers;
5155

5256
// The list of layers with weights to update

Samples/DigitsCudaTest/Program.cs

Lines changed: 10 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -22,16 +22,16 @@ static async Task Main()
2222
{
2323
// Parse the dataset and create the network
2424
(var training, var test) = DataParser.LoadDatasets();
25-
INeuralNetwork network = NetworkManager.NewNetwork(TensorInfo.CreateForGrayscaleImage(28, 28),
26-
t => CuDnnNetworkLayers.Convolutional(t, ConvolutionInfo.Default, (5, 5), 20, ActivationFunctionType.LeakyReLU),
27-
t => CuDnnNetworkLayers.Convolutional(t, ConvolutionInfo.Default, (5, 5), 20, ActivationFunctionType.Identity),
28-
t => CuDnnNetworkLayers.Pooling(t, PoolingInfo.Default, ActivationFunctionType.LeakyReLU),
29-
t => CuDnnNetworkLayers.Convolutional(t, ConvolutionInfo.Default, (3, 3), 40, ActivationFunctionType.LeakyReLU),
30-
t => CuDnnNetworkLayers.Convolutional(t, ConvolutionInfo.Default, (3, 3), 40, ActivationFunctionType.Identity),
31-
t => CuDnnNetworkLayers.Pooling(t, PoolingInfo.Default, ActivationFunctionType.LeakyReLU),
32-
t => CuDnnNetworkLayers.FullyConnected(t, 125, ActivationFunctionType.LeCunTanh),
33-
t => CuDnnNetworkLayers.FullyConnected(t, 64, ActivationFunctionType.LeCunTanh),
34-
t => CuDnnNetworkLayers.Softmax(t, 10));
25+
INeuralNetwork network = NetworkManager.NewSequential(TensorInfo.CreateForGrayscaleImage(28, 28),
26+
CuDnnNetworkLayers.Convolutional(ConvolutionInfo.Default, (5, 5), 20, ActivationFunctionType.LeakyReLU),
27+
CuDnnNetworkLayers.Convolutional(ConvolutionInfo.Default, (5, 5), 20, ActivationFunctionType.Identity),
28+
CuDnnNetworkLayers.Pooling(PoolingInfo.Default, ActivationFunctionType.LeakyReLU),
29+
CuDnnNetworkLayers.Convolutional(ConvolutionInfo.Default, (3, 3), 40, ActivationFunctionType.LeakyReLU),
30+
CuDnnNetworkLayers.Convolutional(ConvolutionInfo.Default, (3, 3), 40, ActivationFunctionType.Identity),
31+
CuDnnNetworkLayers.Pooling(PoolingInfo.Default, ActivationFunctionType.LeakyReLU),
32+
CuDnnNetworkLayers.FullyConnected(125, ActivationFunctionType.LeCunTanh),
33+
CuDnnNetworkLayers.FullyConnected(64, ActivationFunctionType.LeCunTanh),
34+
CuDnnNetworkLayers.Softmax(10));
3535

3636
// Setup and start the training
3737
CancellationTokenSource cts = new CancellationTokenSource();

Samples/DigitsTest/Program.cs

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -17,9 +17,9 @@ class Program
1717
static async Task Main()
1818
{
1919
(var training, var test) = DataParser.LoadDatasets();
20-
INeuralNetwork network = NetworkManager.NewNetwork(TensorInfo.CreateForGrayscaleImage(28, 28),
21-
t => NetworkLayers.FullyConnected(t, 100, ActivationFunctionType.Sigmoid),
22-
t => NetworkLayers.FullyConnected(t, 10, ActivationFunctionType.Sigmoid, CostFunctionType.CrossEntropy));
20+
INeuralNetwork network = NetworkManager.NewSequential(TensorInfo.CreateForGrayscaleImage(28, 28),
21+
NetworkLayers.FullyConnected(100, ActivationFunctionType.Sigmoid),
22+
NetworkLayers.FullyConnected(10, ActivationFunctionType.Sigmoid, CostFunctionType.CrossEntropy));
2323
TrainingSessionResult result = await NetworkManager.TrainNetworkAsync(network, (training.X, training.Y), 60, 10,
2424
TrainingAlgorithmsInfo.CreateForStochasticGradientDescent(), 0.5f,
2525
testParameters: new TestParameters(test, new Progress<BackpropagationProgressEventArgs>(p =>

0 commit comments

Comments
 (0)