Skip to content

Commit a6c808d

Browse files
authored
Merge pull request #41 from Sergio0694/dev
Dev
2 parents 23c5924 + c2518ab commit a6c808d

File tree

75 files changed

+2692
-2596
lines changed

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

75 files changed

+2692
-2596
lines changed
Lines changed: 70 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,70 @@
1+
using JetBrains.Annotations;
2+
using NeuralNetworkNET.APIs.Enums;
3+
using NeuralNetworkNET.APIs.Interfaces;
4+
using NeuralNetworkNET.APIs.Structs;
5+
using NeuralNetworkNET.Cuda.Layers;
6+
using NeuralNetworkNET.Networks.Activations;
7+
8+
namespace NeuralNetworkNET.APIs
{
    /// <summary>
    /// A static class that exposes the available cuDNN network layer types
    /// </summary>
    public static class CuDnnNetworkLayers
    {
        /// <summary>
        /// Creates a new fully connected layer with the specified number of input and output neurons, and the given activation function
        /// </summary>
        /// <param name="input">The input <see cref="TensorInfo"/> descriptor</param>
        /// <param name="neurons">The number of output neurons</param>
        /// <param name="activation">The desired activation function to use in the network layer</param>
        /// <param name="weightsMode">The desired initialization mode for the weights in the network layer</param>
        /// <param name="biasMode">The desired initialization mode to use for the layer bias values</param>
        /// <returns>A new cuDNN-powered fully connected layer instance</returns>
        [PublicAPI]
        [Pure, NotNull]
        public static INetworkLayer FullyConnected(
            in TensorInfo input, int neurons, ActivationFunctionType activation,
            WeightsInitializationMode weightsMode = WeightsInitializationMode.GlorotUniform, BiasInitializationMode biasMode = BiasInitializationMode.Zero)
            => new CuDnnFullyConnectedLayer(input, neurons, activation, weightsMode, biasMode);

        /// <summary>
        /// Creates a fully connected softmax output layer (used for classification problems with mutually-exclusive classes)
        /// </summary>
        /// <param name="input">The input <see cref="TensorInfo"/> descriptor</param>
        /// <param name="outputs">The number of output neurons</param>
        /// <param name="weightsMode">The desired initialization mode for the weights in the network layer</param>
        /// <param name="biasMode">The desired initialization mode to use for the layer bias values</param>
        /// <returns>A new cuDNN-powered softmax output layer instance</returns>
        [PublicAPI]
        [Pure, NotNull]
        public static INetworkLayer Softmax(
            in TensorInfo input, int outputs,
            WeightsInitializationMode weightsMode = WeightsInitializationMode.GlorotUniform, BiasInitializationMode biasMode = BiasInitializationMode.Zero)
            => new CuDnnSoftmaxLayer(input, outputs, weightsMode, biasMode);

        /// <summary>
        /// Creates a convolutional layer with the desired number of kernels
        /// </summary>
        /// <param name="input">The input volume to process</param>
        /// <param name="kernel">The volume information of the kernels used in the layer</param>
        /// <param name="kernels">The number of convolution kernels to apply to the input volume</param>
        /// <param name="activation">The desired activation function to use in the network layer</param>
        /// <param name="mode">The desired convolution mode to use</param>
        /// <param name="biasMode">Indicates the desired initialization mode to use for the layer bias values</param>
        /// <returns>A new cuDNN-powered convolutional layer instance</returns>
        [PublicAPI]
        [Pure, NotNull]
        public static INetworkLayer Convolutional(
            in TensorInfo input, (int X, int Y) kernel, int kernels, ActivationFunctionType activation,
            ConvolutionMode mode = ConvolutionMode.Convolution,
            BiasInitializationMode biasMode = BiasInitializationMode.Zero)
            => new CuDnnConvolutionalLayer(input, kernel, kernels, activation, mode, biasMode);

        /// <summary>
        /// Creates a pooling layer with a window of size 2 and a stride of 2
        /// </summary>
        /// <param name="input">The input volume to pool</param>
        /// <param name="activation">The desired activation function to use in the network layer</param>
        /// <returns>A new cuDNN-powered pooling layer instance</returns>
        [PublicAPI]
        [Pure, NotNull]
        public static INetworkLayer Pooling(in TensorInfo input, ActivationFunctionType activation) => new CuDnnPoolingLayer(input, activation);
    }
}
Lines changed: 18 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,18 @@
1+
namespace NeuralNetworkNET.APIs.Enums
{
    /// <summary>
    /// A simple wrapper over the <see cref="Alea.cuDNN.ConvolutionMode"/> <see langword="enum"/>
    /// </summary>
    public enum ConvolutionMode
    {
        /// <summary>
        /// The default convolution mode, with the kernel targeting pixels in the opposite position
        /// </summary>
        Convolution = 0,

        /// <summary>
        /// The cross-correlation mode (equivalent to a convolution with a flipped kernel)
        /// </summary>
        CrossCorrelation = 1
    }
}

NeuralNetwork.NET.Cuda/APIs/NeuralNetworkGpuPreferences.cs

Lines changed: 0 additions & 47 deletions
This file was deleted.

NeuralNetwork.NET.Cuda/APIs/ProcessingMode.cs

Lines changed: 0 additions & 18 deletions
This file was deleted.

0 commit comments

Comments
 (0)