@@ -131,20 +131,21 @@ end
131
131
Conv(weight::AbstractArray, [bias, activation; stride, pad, dilation])
132
132
133
133
Constructs a convolutional layer with the given weight and bias.
134
- Accepts the same keywords and has the same defaults as `Conv((4,4), 3 => 7, relu)`.
134
+ Accepts the same keywords and has the same defaults as
135
+ [`Conv(k::NTuple{N,Integer}, ch::Pair{<:Integer,<:Integer}, σ; ...)`](@ref Conv).
135
136
136
137
```jldoctest
137
138
julia> weight = rand(3, 4, 5);
138
139
139
140
julia> bias = zeros(5);
140
141
141
- julia> c1 = Conv(weight, bias, sigmoid) # expects 1 spatial dimension
142
+ julia> layer = Conv(weight, bias, sigmoid) # expects 1 spatial dimension
142
143
Conv((3,), 4 => 5, σ) # 65 parameters
143
144
144
- julia> c1 (randn(100, 4, 64)) |> size
145
+ julia> layer(randn(100, 4, 64)) |> size
145
146
(98, 5, 64)
146
147
147
- julia> Flux.params(c1 ) |> length
148
+ julia> Flux.params(layer) |> length
148
149
2
149
150
```
150
151
"""
@@ -238,10 +239,10 @@ See also [`Conv`](@ref) for more detailed description of keywords.
238
239
```jldoctest
239
240
julia> xs = rand(Float32, 100, 100, 3, 50); # a batch of 50 RGB images
240
241
241
- julia> lay = ConvTranspose((5,5), 3 => 7, relu)
242
+ julia> layer = ConvTranspose((5,5), 3 => 7, relu)
242
243
ConvTranspose((5, 5), 3 => 7, relu) # 532 parameters
243
244
244
- julia> lay (xs) |> size
245
+ julia> layer(xs) |> size
245
246
(104, 104, 7, 50)
246
247
247
248
julia> ConvTranspose((5,5), 3 => 7, stride=2)(xs) |> size
@@ -268,21 +269,22 @@ _channels_out(l::ConvTranspose) = size(l.weight)[end-1]*l.groups
268
269
ConvTranspose(weight::AbstractArray, [bias, activation; stride, pad, dilation, groups])
269
270
270
271
Constructs a ConvTranspose layer with the given weight and bias.
271
- Accepts the same keywords and has the same defaults as `ConvTranspose((4,4), 3 => 7, relu)`.
272
+ Accepts the same keywords and has the same defaults as
273
+ [`ConvTranspose(k::NTuple{N,Integer}, ch::Pair{<:Integer,<:Integer}, σ; ...)`](@ref ConvTranspose).
272
274
273
275
# Examples
274
276
```jldoctest
275
277
julia> weight = rand(3, 4, 5);
276
278
277
279
julia> bias = zeros(4);
278
280
279
- julia> c1 = ConvTranspose(weight, bias, sigmoid)
281
+ julia> layer = ConvTranspose(weight, bias, sigmoid)
280
282
ConvTranspose((3,), 5 => 4, σ) # 64 parameters
281
283
282
- julia> c1 (randn(100, 5, 64)) |> size # transposed convolution will increase the dimension size (upsampling)
284
+ julia> layer(randn(100, 5, 64)) |> size # transposed convolution will increase the dimension size (upsampling)
283
285
(102, 4, 64)
284
286
285
- julia> Flux.params(c1 ) |> length
287
+ julia> Flux.params(layer) |> length
286
288
2
287
289
```
288
290
"""
@@ -356,10 +358,10 @@ See [`Conv`](@ref) for a description of the arguments.
356
358
```jldoctest
357
359
julia> xs = rand(Float32, 100, 100, 3, 50); # a batch of 50 RGB images
358
360
359
- julia> lay = DepthwiseConv((5,5), 3 => 6, relu; bias=false)
361
+ julia> layer = DepthwiseConv((5,5), 3 => 6, relu; bias=false)
360
362
Conv((5, 5), 3 => 6, relu, groups=3, bias=false) # 150 parameters
361
363
362
- julia> lay (xs) |> size
364
+ julia> layer(xs) |> size
363
365
(96, 96, 6, 50)
364
366
365
367
julia> DepthwiseConv((5, 5), 3 => 9, stride=2, pad=2)(xs) |> size
@@ -388,21 +390,17 @@ specifying the size of the convolutional kernel;
388
390
Parameters are controlled by additional keywords, with defaults
389
391
`init=glorot_uniform` and `bias=true`.
390
392
391
- CrossCor layer can also be manually constructed by passing in weights and
392
- biases. This constructor accepts the layer accepts the same keywords (and has
393
- the same defaults) as the `CrossCor((4,4), 3 => 7, relu)` method.
394
-
395
393
See also [`Conv`](@ref) for more detailed description of keywords.
396
394
397
395
# Examples
398
396
399
397
```jldoctest
400
398
julia> xs = rand(Float32, 100, 100, 3, 50); # a batch of 50 RGB images
401
399
402
- julia> lay = CrossCor((5,5), 3 => 6, relu; bias=false)
400
+ julia> layer = CrossCor((5,5), 3 => 6, relu; bias=false)
403
401
CrossCor((5, 5), 3 => 6, relu, bias=false) # 450 parameters
404
402
405
- julia> lay (xs) |> size
403
+ julia> layer(xs) |> size
406
404
(96, 96, 6, 50)
407
405
408
406
julia> CrossCor((5,5), 3 => 7, stride=3, pad=(2,0))(xs) |> size
@@ -422,20 +420,20 @@ end
422
420
CrossCor(weight::AbstractArray, [bias, activation; stride, pad, dilation])
423
421
424
422
Constructs a CrossCor layer with the given weight and bias.
425
- Accepts the same keywords and has the same defaults as `CrossCor((5,5), 3 => 6, relu)`.
423
+ Accepts the same keywords and has the same defaults as
424
+ [`CrossCor(k::NTuple{N,Integer}, ch::Pair{<:Integer,<:Integer}, σ; ...)`](@ref CrossCor).
426
425
427
426
# Examples
428
427
```jldoctest
429
428
julia> weight = rand(3, 4, 5);
430
429
431
430
julia> bias = zeros(5);
432
431
433
- julia> lay = CrossCor(weight, bias, relu)
432
+ julia> layer = CrossCor(weight, bias, relu)
434
433
CrossCor((3,), 4 => 5, relu) # 65 parameters
435
434
436
- julia> lay (randn(100, 4, 64)) |> size
435
+ julia> layer(randn(100, 4, 64)) |> size
437
436
(98, 5, 64)
438
-
439
437
```
440
438
"""
441
439
function CrossCor(w::AbstractArray{T,N}, bias = true, σ = identity;
@@ -667,10 +665,10 @@ julia> m[1](xs) |> size
667
665
julia> m(xs) |> size
668
666
(20, 20, 7, 50)
669
667
670
- julia> lay = MaxPool((5,), pad=2, stride=(3,)) # one-dimensional window
668
+ julia> layer = MaxPool((5,), pad=2, stride=(3,)) # one-dimensional window
671
669
MaxPool((5,), pad=2, stride=3)
672
670
673
- julia> lay (rand(Float32, 100, 7, 50)) |> size
671
+ julia> layer(rand(Float32, 100, 7, 50)) |> size
674
672
(34, 7, 50)
675
673
```
676
674
"""
0 commit comments