2 files changed: +6 -13 lines

First file (the AlphaDropout forward pass):

@@ -107,14 +107,12 @@ function (a::AlphaDropout)(x::AbstractArray{T}) where T
   iszero(p) && return x
   isone(p) && return sign.(x) .* T(0)

-  λ = T(1.0507009873554804934193349852946)
-  α = T(1.6732632423543772848170429916717)
-  α1 = T(-λ * α)
-  A = inv(sqrt((1 - p) * (1 + p * α1^2)))
-  B = -A * α1 * p
+  α′ = T(-1.7580993408473766)   # selu(-Inf) == -λα
+  A = T(inv(sqrt((1 - p) * (1 + p * α′^2))))
+  B = T(-A * α′ * p)

   noise = rand!(similar(x))
-  return A .* ifelse.(noise .> p, x, α1) .+ B
+  return A .* ifelse.(noise .> p, x, α′) .+ B
 end

 testmode!(m::AlphaDropout, mode=true) =
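For reference, the folded constant matches the product of the two SELU parameters it replaces; a quick REPL check (a sketch, assuming Float64 arithmetic and the `selu` from NNlib that Flux re-exports):

julia> λ = 1.0507009873554804934193349852946;

julia> α = 1.6732632423543772848170429916717;

julia> -λ * α   # the value folded into α′
-1.7580993408473766

julia> using NNlib: selu

julia> selu(-Inf)   # same limit, as the inline comment notes
-1.7580993408473766

Precomputing α′, A, and B this way also guarantees all three are of element type T, which the old code did not for A and B.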
Second file (the GPU layer tests):

@@ -10,13 +10,8 @@
   @test gradient(x -> sum(cpu(x)), gpu(rand(3,3))) isa Tuple
 end

-# TODO: These layers get into scalar indexing
-# `AlphaDropout` throws a compilation error on GPUs,
-# whereas, the rest are scalar indexing issues.
-# The norm layers behave differently on the CPU and
-# the GPU too.
-const BROKEN_LAYERS = Union{DepthwiseConv,
-                            AlphaDropout}
+# TODO: These layers get into scalar indexing issues.
+const BROKEN_LAYERS = Union{DepthwiseConv}

 const ACTIVATIONS = [identity, relu, tanh,
                     sigmoid, exp, softplus,
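For context on the second hunk: BROKEN_LAYERS is a plain Union type, so a test can branch on isa to mark known failures instead of asserting them. The loop below is a hypothetical sketch of that gating pattern (the layer constructors, input shape, and @test_broken fallback are illustrative, not the test file's actual code):

using Flux, Test

# Hypothetical sketch: expect failure for layers known to hit scalar
# indexing on the GPU, run the normal assertion for everything else.
for layer in (Conv((3, 3), 3 => 4), DepthwiseConv((3, 3), 3 => 3))
    l = gpu(layer)
    x = gpu(rand(Float32, 5, 5, 3, 1))
    if layer isa BROKEN_LAYERS
        @test_broken sum(l(x)) isa Float32
    else
        @test sum(l(x)) isa Float32
    end
end

With AlphaDropout removed from the union, it now goes through the normal @test path on the GPU.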