Commit 297b822

Specify signature of constructors, use uniform variable names
1 parent da5a91d

1 file changed: +22 −24 lines

src/layers/conv.jl

@@ -131,20 +131,21 @@ end
     Conv(weight::AbstractArray, [bias, activation; stride, pad, dilation])
 
 Constructs a convolutional layer with the given weight and bias.
-Accepts the same keywords and has the same defaults as `Conv((4,4), 3 => 7, relu)`.
+Accepts the same keywords and has the same defaults as
+[`Conv(k::NTuple{N,Integer}, ch::Pair{<:Integer,<:Integer}, σ; ...)`](@ref Conv).
 
 ```jldoctest
 julia> weight = rand(3, 4, 5);
 
 julia> bias = zeros(5);
 
-julia> c1 = Conv(weight, bias, sigmoid) # expects 1 spatial dimension
+julia> layer = Conv(weight, bias, sigmoid) # expects 1 spatial dimension
 Conv((3,), 4 => 5, σ) # 65 parameters
 
-julia> c1(randn(100, 4, 64)) |> size
+julia> layer(randn(100, 4, 64)) |> size
 (98, 5, 64)
 
-julia> Flux.params(c1) |> length
+julia> Flux.params(layer) |> length
 2
 ```
 """
@@ -238,10 +239,10 @@ See also [`Conv`](@ref) for more detailed description of keywords.
 ```jldoctest
 julia> xs = rand(Float32, 100, 100, 3, 50); # a batch of 50 RGB images
 
-julia> lay = ConvTranspose((5,5), 3 => 7, relu)
+julia> layer = ConvTranspose((5,5), 3 => 7, relu)
 ConvTranspose((5, 5), 3 => 7, relu) # 532 parameters
 
-julia> lay(xs) |> size
+julia> layer(xs) |> size
 (104, 104, 7, 50)
 
 julia> ConvTranspose((5,5), 3 => 7, stride=2)(xs) |> size
@@ -268,21 +269,22 @@ _channels_out(l::ConvTranspose) = size(l.weight)[end-1]*l.groups
     ConvTranspose(weight::AbstractArray, [bias, activation; stride, pad, dilation, groups])
 
 Constructs a ConvTranspose layer with the given weight and bias.
-Accepts the same keywords and has the same defaults as `ConvTranspose((4,4), 3 => 7, relu)`.
+Accepts the same keywords and has the same defaults as
+[`ConvTranspose(k::NTuple{N,Integer}, ch::Pair{<:Integer,<:Integer}, σ; ...)`](@ref ConvTranspose).
 
 # Examples
 ```jldoctest
 julia> weight = rand(3, 4, 5);
 
 julia> bias = zeros(4);
 
-julia> c1 = ConvTranspose(weight, bias, sigmoid)
+julia> layer = ConvTranspose(weight, bias, sigmoid)
 ConvTranspose((3,), 5 => 4, σ) # 64 parameters
 
-julia> c1(randn(100, 5, 64)) |> size # transposed convolution will increase the dimension size (upsampling)
+julia> layer(randn(100, 5, 64)) |> size # transposed convolution will increase the dimension size (upsampling)
 (102, 4, 64)
 
-julia> Flux.params(c1) |> length
+julia> Flux.params(layer) |> length
 2
 ```
 """
@@ -356,10 +358,10 @@ See [`Conv`](@ref) for a description of the arguments.
 ```jldoctest
 julia> xs = rand(Float32, 100, 100, 3, 50); # a batch of 50 RGB images
 
-julia> lay = DepthwiseConv((5,5), 3 => 6, relu; bias=false)
+julia> layer = DepthwiseConv((5,5), 3 => 6, relu; bias=false)
 Conv((5, 5), 3 => 6, relu, groups=3, bias=false) # 150 parameters
 
-julia> lay(xs) |> size
+julia> layer(xs) |> size
 (96, 96, 6, 50)
 
 julia> DepthwiseConv((5, 5), 3 => 9, stride=2, pad=2)(xs) |> size
@@ -388,21 +390,17 @@ specifying the size of the convolutional kernel;
 Parameters are controlled by additional keywords, with defaults
 `init=glorot_uniform` and `bias=true`.
 
-CrossCor layer can also be manually constructed by passing in weights and
-biases. This constructor accepts the layer accepts the same keywords (and has
-the same defaults) as the `CrossCor((4,4), 3 => 7, relu)` method.
-
 See also [`Conv`](@ref) for more detailed description of keywords.
 
 # Examples
 
 ```jldoctest
 julia> xs = rand(Float32, 100, 100, 3, 50); # a batch of 50 RGB images
 
-julia> lay = CrossCor((5,5), 3 => 6, relu; bias=false)
+julia> layer = CrossCor((5,5), 3 => 6, relu; bias=false)
 CrossCor((5, 5), 3 => 6, relu, bias=false) # 450 parameters
 
-julia> lay(xs) |> size
+julia> layer(xs) |> size
 (96, 96, 6, 50)
 
 julia> CrossCor((5,5), 3 => 7, stride=3, pad=(2,0))(xs) |> size
@@ -422,20 +420,20 @@ end
     CrossCor(weight::AbstractArray, [bias, activation; stride, pad, dilation])
 
 Constructs a CrossCor layer with the given weight and bias.
-Accepts the same keywords and has the same defaults as `CrossCor((5,5), 3 => 6, relu)`.
+Accepts the same keywords and has the same defaults as
+[`CrossCor(k::NTuple{N,Integer}, ch::Pair{<:Integer,<:Integer}, σ; ...)`](@ref CrossCor).
 
 # Examples
 ```jldoctest
 julia> weight = rand(3, 4, 5);
 
 julia> bias = zeros(5);
 
-julia> lay = CrossCor(weight, bias, relu)
+julia> layer = CrossCor(weight, bias, relu)
 CrossCor((3,), 4 => 5, relu) # 65 parameters
 
-julia> lay(randn(100, 4, 64)) |> size
+julia> layer(randn(100, 4, 64)) |> size
 (98, 5, 64)
-
 ```
 """
 function CrossCor(w::AbstractArray{T,N}, bias = true, σ = identity;
@@ -667,10 +665,10 @@ julia> m[1](xs) |> size
 julia> m(xs) |> size
 (20, 20, 7, 50)
 
-julia> lay = MaxPool((5,), pad=2, stride=(3,)) # one-dimensional window
+julia> layer = MaxPool((5,), pad=2, stride=(3,)) # one-dimensional window
 MaxPool((5,), pad=2, stride=3)
 
-julia> lay(rand(Float32, 100, 7, 50)) |> size
+julia> layer(rand(Float32, 100, 7, 50)) |> size
 (34, 7, 50)
 ```
 """
