@@ -19,10 +19,33 @@ the padding to be chosen such that the input and output sizes agree
When `stride≠1`, the output size equals `ceil(input_size/stride)`.

See also [`Conv`](@ref), [`MaxPool`](@ref).
+
+ # Examples
+ ```jldoctest
+ julia> xs = rand(Float32, 100, 100, 3, 50); # a batch of images
+
+ julia> layer = Conv((2,2), 3 => 7, pad=SamePad())
+ Conv((2, 2), 3 => 7, pad=(1, 0, 1, 0)) # 91 parameters
+
+ julia> layer(xs) |> size # notice how the dimensions stay the same with this padding
+ (100, 100, 7, 50)
+
+ julia> layer2 = Conv((2,2), 3 => 7)
+ Conv((2, 2), 3 => 7) # 91 parameters
+
+ julia> layer2(xs) |> size # the output dimension changes as the padding was not "same"
+ (99, 99, 7, 50)
+
+ julia> layer3 = Conv((5, 5), 3 => 7, stride=2, pad=SamePad())
+ Conv((5, 5), 3 => 7, pad=2, stride=2) # 532 parameters
+
+ julia> layer3(xs) |> size # output size = `ceil(input_size/stride)` = 50
+ (50, 50, 7, 50)
+ ```
"""
struct SamePad end

- calc_padding(lt, pad, k::NTuple{N,T}, dilation, stride) where {T,N}= expand(Val(2 * N), pad)
+ calc_padding(lt, pad, k::NTuple{N,T}, dilation, stride) where {T,N} = expand(Val(2 * N), pad)
function calc_padding(lt, ::SamePad, k::NTuple{N,T}, dilation, stride) where {N,T}
  # Ref: "A guide to convolution arithmetic for deep learning" https://arxiv.org/abs/1603.07285
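For context on the hunk above, here is a minimal sketch of the "same" padding arithmetic from the cited guide (a reconstruction, not the exact function body): the total padding per spatial dimension is `(k - 1) * dilation`, split across the two sides of that dimension.

```julia
k, dilation = (2, 2), (1, 1)
pad_total = (k .- 1) .* dilation                                  # (1, 1)
pad = Tuple(mapfoldl(i -> [cld(i, 2), fld(i, 2)], vcat, pad_total))
# pad == (1, 0, 1, 0), matching the `Conv((2,2), 3 => 7, pad=SamePad())` doctest above
```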
@@ -108,22 +131,21 @@ end
Conv(weight::AbstractArray, [bias, activation; stride, pad, dilation])

Constructs a convolutional layer with the given weight and bias.
- Accepts the same keywords (and has the same defaults) as the `Conv((4,4), 3 => 7, relu)`
- method.
+ Accepts the same keywords and has the same defaults as
+ [`Conv(k::NTuple{N,Integer}, ch::Pair{<:Integer,<:Integer}, σ; ...)`](@ref Conv).

- # Examples
```jldoctest
julia> weight = rand(3, 4, 5);

julia> bias = zeros(5);

- julia> c1 = Conv(weight, bias, sigmoid) # expects 1 spatial dimension
+ julia> layer = Conv(weight, bias, sigmoid) # expects 1 spatial dimension
Conv((3,), 4 => 5, σ) # 65 parameters

- julia> c1(randn(100, 4, 64)) |> size
+ julia> layer(randn(100, 4, 64)) |> size
(98, 5, 64)

- julia> Flux.params(c1) |> length
+ julia> Flux.params(layer) |> length
2
```
"""
@@ -154,6 +176,8 @@ channels from `in` to `out`.

Accepts the keyword `init` (default: `glorot_uniform`) to control the sampling
distribution.
+
+ This is internally used by the [`Conv`](@ref) layer.
"""
function convfilter(filter::NTuple{N,Integer}, ch::Pair{<:Integer,<:Integer};
  init = glorot_uniform, groups = 1) where N
@@ -176,7 +200,7 @@ function (c::Conv)(x::AbstractArray)
  σ.(conv(x, c.weight, cdims) .+ conv_reshape_bias(c))
end

- _channels_in(l :: Conv) = size(l.weight, ndims(l.weight)-1) * l.groups
+ _channels_in(l::Conv) = size(l.weight, ndims(l.weight)-1) * l.groups
_channels_out(l::Conv) = size(l.weight, ndims(l.weight))

function Base.show(io::IO, l::Conv)
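The `_channels_in` helper touched above multiplies by `groups` because a grouped layer stores only `in ÷ groups` input channels in its weight array. A small sketch of that arithmetic, with values assumed for illustration:

```julia
# For groups = 3, a 3 => 6 Conv stores a weight of size (k..., 3 ÷ 3, 6):
w = rand(Float32, 5, 5, 1, 6)
size(w, ndims(w) - 1) * 3   # _channels_in:  1 * 3 == 3
size(w, ndims(w))           # _channels_out: 6
```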
@@ -215,10 +239,10 @@ See also [`Conv`](@ref) for more detailed description of keywords.
```jldoctest
julia> xs = rand(Float32, 100, 100, 3, 50); # a batch of 50 RGB images

- julia> lay = ConvTranspose((5,5), 3 => 7, relu)
+ julia> layer = ConvTranspose((5,5), 3 => 7, relu)
ConvTranspose((5, 5), 3 => 7, relu) # 532 parameters

- julia> lay(xs) |> size
+ julia> layer(xs) |> size
(104, 104, 7, 50)

julia> ConvTranspose((5,5), 3 => 7, stride=2)(xs) |> size
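The `(104, 104, 7, 50)` doctest output above follows from the standard transposed-convolution size rule, stated here as background (not quoted from the docstring):

```julia
# out = (in - 1) * stride - pad_lo - pad_hi + k, per spatial dimension
(100 - 1) * 1 - 0 - 0 + 5   # == 104, matching the doctest above
```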
@@ -244,8 +268,25 @@ _channels_out(l::ConvTranspose) = size(l.weight)[end-1]*l.groups
"""
ConvTranspose(weight::AbstractArray, [bias, activation; stride, pad, dilation, groups])

- Constructs a layer with the given weight and bias arrays.
- Accepts the same keywords as the `ConvTranspose((4,4), 3 => 7, relu)` method.
+ Constructs a ConvTranspose layer with the given weight and bias.
+ Accepts the same keywords and has the same defaults as
+ [`ConvTranspose(k::NTuple{N,Integer}, ch::Pair{<:Integer,<:Integer}, σ; ...)`](@ref ConvTranspose).
+
+ # Examples
+ ```jldoctest
+ julia> weight = rand(3, 4, 5);
+
+ julia> bias = zeros(4);
+
+ julia> layer = ConvTranspose(weight, bias, sigmoid)
+ ConvTranspose((3,), 5 => 4, σ) # 64 parameters
+
+ julia> layer(randn(100, 5, 64)) |> size # transposed convolution will increase the dimension size (upsampling)
+ (102, 4, 64)
+
+ julia> Flux.params(layer) |> length
+ 2
+ ```
"""
function ConvTranspose(w::AbstractArray{T,N}, bias = true, σ = identity;
  stride = 1, pad = 0, dilation = 1, groups = 1) where {T,N}
@@ -299,7 +340,6 @@ function Base.show(io::IO, l::ConvTranspose)
  print(io, ")")
end

-
function calc_padding(::Type{ConvTranspose}, pad::SamePad, k::NTuple{N,T}, dilation, stride) where {N,T}
  calc_padding(Conv, pad, k .- stride .+ 1, dilation, stride)
end
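The `k .- stride .+ 1` adjustment above reuses `Conv`'s same-padding rule so that, with `SamePad()`, a strided `ConvTranspose` scales each spatial dimension by `stride`. A rough check of that assumed behaviour, mirroring the SamePad docstring:

```julia
using Flux

layer = ConvTranspose((5, 5), 3 => 7; stride = 2, pad = SamePad())
size(layer(rand(Float32, 50, 50, 3, 1)))   # (100, 100, 7, 1): input size × stride
```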
@@ -318,10 +358,10 @@ See [`Conv`](@ref) for a description of the arguments.
```jldoctest
julia> xs = rand(Float32, 100, 100, 3, 50); # a batch of 50 RGB images

- julia> lay = DepthwiseConv((5,5), 3 => 6, relu; bias=false)
+ julia> layer = DepthwiseConv((5,5), 3 => 6, relu; bias=false)
Conv((5, 5), 3 => 6, relu, groups=3, bias=false) # 150 parameters

- julia> lay(xs) |> size
+ julia> layer(xs) |> size
(96, 96, 6, 50)

julia> DepthwiseConv((5, 5), 3 => 9, stride=2, pad=2)(xs) |> size
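As the `Conv((5, 5), 3 => 6, relu, groups=3, bias=false)` printout in this doctest suggests, `DepthwiseConv` appears to lower to a grouped `Conv` with `groups` equal to the input channel count. Under that assumption, the two constructions below are equivalent in shape:

```julia
using Flux

d = DepthwiseConv((5, 5), 3 => 6)      # one filter group per input channel
c = Conv((5, 5), 3 => 6; groups = 3)   # same weight shape: (5, 5, 1, 6)
size(d.weight) == size(c.weight)       # true under the assumption above
```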
@@ -343,7 +383,7 @@
"""
CrossCor(filter, in => out, σ=identity; stride=1, pad=0, dilation=1, [bias, init])

- Standard cross convolutional layer. `filter` is a tuple of integers
+ Standard cross correlation layer. `filter` is a tuple of integers
specifying the size of the convolutional kernel;
`in` and `out` specify the number of input and output channels.
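Worth keeping in mind when reading this hunk: `CrossCor` applies the kernel without flipping it, while `Conv` performs true convolution (flipped kernel). For a kernel symmetric under 180° rotation the two coincide; a sketch under that assumption:

```julia
using Flux

w = ones(Float32, 3, 3, 1, 1)               # symmetric kernel, so flipping is a no-op
x = rand(Float32, 8, 8, 1, 1)
Conv(w, false)(x) ≈ CrossCor(w, false)(x)   # true for this symmetric weight
```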
@@ -357,10 +397,10 @@ See also [`Conv`](@ref) for more detailed description of keywords.
```jldoctest
julia> xs = rand(Float32, 100, 100, 3, 50); # a batch of 50 RGB images

- julia> lay = CrossCor((5,5), 3 => 6, relu; bias=false)
+ julia> layer = CrossCor((5,5), 3 => 6, relu; bias=false)
CrossCor((5, 5), 3 => 6, relu, bias=false) # 450 parameters

- julia> lay(xs) |> size
+ julia> layer(xs) |> size
(96, 96, 6, 50)

julia> CrossCor((5,5), 3 => 7, stride=3, pad=(2,0))(xs) |> size
@@ -379,8 +419,22 @@
"""
CrossCor(weight::AbstractArray, [bias, activation; stride, pad, dilation])

- Constructs a layer with the given weight and bias arrays.
- Accepts the same keywords as the `CrossCor((4,4), 3 => 7, relu)` method.
+ Constructs a CrossCor layer with the given weight and bias.
+ Accepts the same keywords and has the same defaults as
+ [`CrossCor(k::NTuple{N,Integer}, ch::Pair{<:Integer,<:Integer}, σ; ...)`](@ref CrossCor).
+
+ # Examples
+ ```jldoctest
+ julia> weight = rand(3, 4, 5);
+
+ julia> bias = zeros(5);
+
+ julia> layer = CrossCor(weight, bias, relu)
+ CrossCor((3,), 4 => 5, relu) # 65 parameters
+
+ julia> layer(randn(100, 4, 64)) |> size
+ (98, 5, 64)
+ ```
"""
function CrossCor(w::AbstractArray{T,N}, bias = true, σ = identity;
  stride = 1, pad = 0, dilation = 1) where {T,N}
@@ -611,10 +665,10 @@ julia> m[1](xs) |> size
julia> m(xs) |> size
(20, 20, 7, 50)

- julia> lay = MaxPool((5,), pad=2, stride=(3,)) # one-dimensional window
+ julia> layer = MaxPool((5,), pad=2, stride=(3,)) # one-dimensional window
MaxPool((5,), pad=2, stride=3)

- julia> lay(rand(Float32, 100, 7, 50)) |> size
+ julia> layer(rand(Float32, 100, 7, 50)) |> size
(34, 7, 50)
```
"""