
Commit ec99c2c

bors[bot] and Rodrigo Vargas authored
Merge #518
518: Avoid exponential of positive numbers in softplus implementation r=maleadt a=vargonis

Just use the standard trick to avoid getting `Inf` when broadcasting softplus to GPU arrays containing large positive numbers.

Co-authored-by: Rodrigo Vargas <rodrigo@odd.co>
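The problem in a nutshell: the naive softplus(x) = log(1 + eˣ) overflows as soon as eˣ exceeds the floating-point range, so broadcasting it over an array containing, say, 1000.0 yields Inf. A minimal CPU sketch of the standard trick (illustrative only; the function names here are not from the commit):

# Naive form: exp(1000.0) == Inf, so log1p(Inf) == Inf.
naive_softplus(x) = log1p(exp(x))

# Stable form: for x > 0, log(1 + eˣ) == x + log(1 + e⁻ˣ), so the
# argument of exp is never positive and cannot overflow.
stable_softplus(x) = ifelse(x > 0, x + log1p(exp(-x)), log1p(exp(x)))

naive_softplus(1000.0)   # Inf
stable_softplus(1000.0)  # 1000.0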
2 parents: 4dedd0f + f4ce643

2 files changed: 11 additions, 1 deletion

src/nnlib.jl

Lines changed: 1 addition & 1 deletion
@@ -28,4 +28,4 @@ end
   λ * ifelse(x > 0, x/1, α * (exp(x) - 1))
 end
 
-@cufunc softplus(x) = log1p(exp(x))
+@cufunc softplus(x) = ifelse(x > 0, x + log1p(exp(-x)), log1p(exp(x)))
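The replacement relies on the identity log(1 + eˣ) = x + log(1 + e⁻ˣ), valid for all real x: the x > 0 test merely selects whichever form keeps the argument of exp non-positive, so the result is mathematically unchanged. Presumably ifelse is preferred over an if/else here because it evaluates both arms and lowers to a branch-free select, which suits the GPU's lock-step execution model; the commit itself does not comment on this.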

test/dnn.jl

Lines changed: 10 additions & 0 deletions
@@ -78,6 +78,16 @@ end
   @test testf(CuArrays.CUDNN.cudnnAddTensor, cu(rand(Float64, 10, 10, 3, 1)), cu(rand(Float64, 10, 10, 3, 1)))
   @test testf(CuArrays.CUDNN.cudnnActivationForward, cu(rand(Float64, 10, 10, 3, 1)), cu(rand(Float64, 10, 10, 3, 1)))
   @test testf(CuArrays.CUDNN.cudnnActivationBackward, cu(rand(Float64, 10, 10, 3, 1)), cu(rand(Float64, 10, 10, 3, 1)), cu(rand(Float64, 10, 10, 3, 1)), cu(rand(Float64, 10, 10, 3, 1)))
+
+  # activations defined in src/nnlib.jl
+  for dims in ((5,5), (5,))
+    for f in (σ, logσ, elu, swish, gelu, selu, softplus)
+      @test testf(x -> f.(x), rand(Float64, dims))
+    end
+  end
+  # softplus does not give `Inf` for large arguments
+  x = cu([1000.])
+  @test all(softplus.(x) .== x)
 end
 
 @testset "Batchnorm" begin
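A note on the last assertion: it can use exact equality (.==) because for x = 1000 the correction term log1p(exp(-x)) underflows to exactly zero, so the stable softplus returns x unchanged. A quick CPU check of the same arithmetic (illustrative; the test itself runs on a CuArray, where cu converts to Float32):

julia> exp(-1000.0)
0.0

julia> 1000.0 + log1p(exp(-1000.0))
1000.0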
