
Commit 7331499

Fixed a crash in the Cuda test method
1 parent 786f1bb commit 7331499

1 file changed: +9 −11 lines changed


NeuralNetwork.NET/APIs/CuDnnNetworkLayers.cs

Lines changed: 9 additions & 11 deletions
@@ -111,18 +111,16 @@ public static bool IsGpuAccelerationSupported()
             try
             {
                 // CUDA test
-                using (Alea.Gpu gpu = Alea.Gpu.Default)
+                Alea.Gpu gpu = Alea.Gpu.Default;
+                if (gpu == null) return false;
+                if (!Alea.cuDNN.Dnn.IsAvailable) return false; // cuDNN
+                using (Alea.DeviceMemory<float> sample_gpu = gpu.AllocateDevice<float>(1024))
                 {
-                    if (gpu == null) return false;
-                    if (!Alea.cuDNN.Dnn.IsAvailable) return false; // cuDNN
-                    using (Alea.DeviceMemory<float> sample_gpu = gpu.AllocateDevice<float>(1024))
-                    {
-                        Alea.deviceptr<float> ptr = sample_gpu.Ptr;
-                        void Kernel(int i) => ptr[i] = i;
-                        Alea.Parallel.GpuExtension.For(gpu, 0, 1024, Kernel); // JIT test
-                        float[] sample = Alea.Gpu.CopyToHost(sample_gpu);
-                        return Enumerable.Range(0, 1024).Select<int, float>(i => i).ToArray().ContentEquals(sample);
-                    }
+                    Alea.deviceptr<float> ptr = sample_gpu.Ptr;
+                    void Kernel(int i) => ptr[i] = i;
+                    Alea.Parallel.GpuExtension.For(gpu, 0, 1024, Kernel); // JIT test
+                    float[] sample = Alea.Gpu.CopyToHost(sample_gpu);
+                    return Enumerable.Range(0, 1024).Select<int, float>(i => i).ToArray().ContentEquals(sample);
                 }
             }
             catch
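
The net effect of the change: Alea.Gpu.Default is no longer wrapped in a using block, so the shared default Gpu instance is not disposed by the availability check; only the temporary device buffer still is. A minimal usage sketch follows, assuming the class sits in the NeuralNetworkNET.APIs namespace implied by the file path; only IsGpuAccelerationSupported() itself comes from the diff above, and the surrounding names are illustrative.

// Minimal usage sketch (not part of this commit): only IsGpuAccelerationSupported()
// is taken from the diff above; the namespace and surrounding code are assumptions
// based on the file path NeuralNetwork.NET/APIs/CuDnnNetworkLayers.cs.
using System;
using NeuralNetworkNET.APIs;

public static class GpuCheckExample
{
    public static void Main()
    {
        // Returns false when no CUDA device is present, when cuDNN is unavailable,
        // or when the JIT smoke test inside the try block throws or miscompares.
        bool gpuSupported = CuDnnNetworkLayers.IsGpuAccelerationSupported();
        Console.WriteLine(gpuSupported
            ? "cuDNN acceleration available: CuDnnNetworkLayers can be used"
            : "GPU acceleration unavailable: fall back to the CPU layer APIs");
    }
}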
