We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
There was an error while loading. Please reload this page.
1 parent 185ab40 commit 1242c20
src/layers/normalise.jl
@@ -31,7 +31,7 @@ The [`Dropout`](@ref) layer is what you should use in most scenarios.
31
function dropout(x, p; dims=:, active::Bool=true)
32
active || return x
33
y = rand!(similar(x, _dropout_shape(x, dims)))
34
- @inbounds @. y = x * _dropout_kernel(y, p, 1-p)
+ @. y = x * _dropout_kernel(y, p, 1-p)
35
end
36
37
@adjoint function dropout(x, p; dims=:, active::Bool=true)
@@ -56,7 +56,7 @@ e.g. `Dropout(p; dims = 3)` will randomly zero out entire channels on WHCN input
56
(also called 2D dropout).
57
58
Does nothing to the input once [`Flux.testmode!`](@ref) is `true`.
59
-"""`
+"""
60
mutable struct Dropout{F,D}
61
p::F
62
dims::D
0 commit comments