
Commit f4ce643 (parent 362f914)
Author: Rodrigo Vargas

    Testing activations in src/nnlib.jl


test/dnn.jl: 10 additions & 0 deletions
@@ -78,6 +78,16 @@ end
   @test testf(CuArrays.CUDNN.cudnnAddTensor, cu(rand(Float64, 10, 10, 3, 1)), cu(rand(Float64, 10, 10, 3, 1)))
   @test testf(CuArrays.CUDNN.cudnnActivationForward, cu(rand(Float64, 10, 10, 3, 1)), cu(rand(Float64, 10, 10, 3, 1)))
   @test testf(CuArrays.CUDNN.cudnnActivationBackward, cu(rand(Float64, 10, 10, 3, 1)), cu(rand(Float64, 10, 10, 3, 1)), cu(rand(Float64, 10, 10, 3, 1)), cu(rand(Float64, 10, 10, 3, 1)))
+
+  # activations defined in src/nnlib.jl
+  for dims in ((5,5), (5,))
+    for f in (σ, logσ, elu, swish, gelu, selu, softplus)
+      @test testf(x -> f.(x), rand(Float64, dims))
+    end
+  end
+  # softplus does not give `Inf` for large arguments
+  x = cu([1000.])
+  @test all(softplus.(x) .== x)
 end

 @testset "Batchnorm" begin
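
For context, `testf` is a helper defined elsewhere in the CuArrays test suite; this commit only uses it. A minimal sketch of the pattern such a helper typically follows, assuming it evaluates a function on host arrays and on their `cu` copies and compares the results on the host (the name `testf_sketch` is illustrative, not the repository's code):

using CuArrays, NNlib, Test

# Hypothetical stand-in for the suite's `testf`: run `f` on the host
# inputs and on their GPU copies, then compare the results on the host.
function testf_sketch(f, xs...)
    cpu_out = f(xs...)
    gpu_out = f(map(cu, xs)...)
    collect(gpu_out) ≈ cpu_out
end

# Mirrors one of the new tests: broadcast an activation over a random array.
@test testf_sketch(x -> elu.(x), rand(Float64, 5, 5))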
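
The final assertion documents numerical stability rather than agreement with a CPU reference: a naive softplus(x) = log(1 + exp(x)) overflows to Inf as soon as exp(x) does, while a stable formulation returns x itself for large inputs. A sketch of the standard stable rewrite (the names are illustrative; this is not the definition from src/nnlib.jl):

# Naive form: exp(1000.0) overflows to Inf, so the result is Inf.
softplus_naive(x) = log(1 + exp(x))

# Stable form: for x > 0, log(1 + exp(x)) == x + log1p(exp(-x)),
# and exp(-x) underflows harmlessly to 0, leaving exactly x.
softplus_stable(x) = ifelse(x > 0, x + log1p(exp(-x)), log1p(exp(x)))

softplus_naive(1000.0)   # Inf
softplus_stable(1000.0)  # 1000.0, the behaviour the test asserts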
