Skip to content

Commit db90581

Browse files
Author: cossio — commit message: "spacing"
1 parent: 69afb67 · commit: db90581

File tree

2 files changed: 17 additions (+17) and 17 deletions (−17)

src/utils.jl

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -285,7 +285,7 @@ sparse_init(rng::AbstractRNG; init_kwargs...) = (dims...; kwargs...) -> sparse_i
285285
"""
286286
identity_init([rng=GLOBAL_RNG], dims...; gain=1, shift=0)
287287
288-
Return an `Array` of size `dims` which yields an identity mapping when used as parameters in
288+
Return an `Array` of size `dims` which yields an identity mapping when used as parameters in
289289
most Flux layers. Use `gain` to scale the identity by a constant.
290290
291291
Often useful in the context of transfer learning, i.e when one wants to add more capacity to
@@ -297,10 +297,10 @@ Equivalent to `Base.circshift(identity(dims...), shift)`.
297297
Some caveats: Not all layers will be identity mapping when used with this init. Exceptions
298298
include recurrent layers, `DepthwiseConv` and normalization layers.
299299
300-
Also note that layers must have `input_size == output_size` for identity mapping to be
300+
Also note that layers must have `input_size == output_size` for identity mapping to be
301301
possible. When this is not the case, extra dimensions of the array are padded with zeros.
302302
303-
For convolutional layers, in addition to the above, the kernel sizes must also be odd and
303+
For convolutional layers, in addition to the above, the kernel sizes must also be odd and
304304
padding must be applied so that output feature maps have the same size as input feature maps,
305305
e.g by using [`SamePad`](@ref).
306306
@@ -574,7 +574,7 @@ See also [`unstack`](@ref).
574574
# Examples
575575
576576
```jldoctest
577-
julia> Flux.unbatch([1 3 5 7;
577+
julia> Flux.unbatch([1 3 5 7;
578578
2 4 6 8])
579579
4-element Vector{Vector{Int64}}:
580580
[1, 2]

test/utils.jl

Lines changed: 13 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
using Flux
22
using Flux: throttle, nfan, glorot_uniform, glorot_normal,
3-
kaiming_normal, kaiming_uniform, orthogonal,
3+
kaiming_normal, kaiming_uniform, orthogonal,
44
sparse_init, stack, unstack, Zeros, batch, unbatch
55
using StatsBase: var, std
66
using Random
@@ -178,10 +178,10 @@ end
178178

179179
@testset "$layer ID mapping with kernelsize $kernelsize" for layer in (Conv, ConvTranspose, CrossCor), kernelsize in (
180180
(1,),
181-
(3,),
182-
(1, 3),
183-
(3, 5),
184-
(3, 5, 7))
181+
(3,),
182+
(1, 3),
183+
(3, 5),
184+
(3, 5, 7))
185185
nch = 3
186186
l = layer(kernelsize, nch=>nch, init=identity_init, pad=SamePad())
187187

@@ -333,9 +333,9 @@ end
333333

334334

335335
@testset "Batching" begin
336-
stacked_array=[ 8 9 3 5
337-
9 6 6 9
338-
9 1 7 2
336+
stacked_array=[ 8 9 3 5
337+
9 6 6 9
338+
9 1 7 2
339339
7 4 10 6 ]
340340
unstacked_array=[[8, 9, 9, 7], [9, 6, 1, 4], [3, 6, 7, 10], [5, 9, 2, 6]]
341341
@test unbatch(stacked_array) == unstacked_array
@@ -445,7 +445,7 @@ end
445445

446446
modules = Flux.modules(Chain(SkipConnection(
447447
Conv((2,3), 4=>5; pad=6, stride=7),
448-
+),
448+
+),
449449
LayerNorm(8)))
450450
@test length(modules) == 5
451451
end
@@ -475,16 +475,16 @@ end
475475
@testset "early stopping" begin
476476
@testset "args & kwargs" begin
477477
es = Flux.early_stopping((x; y = 1) -> x + y, 10; min_dist=3)
478-
478+
479479
n_iter = 0
480480
while n_iter < 99
481481
es(-n_iter; y=-n_iter) && break
482482
n_iter += 1
483483
end
484-
484+
485485
@test n_iter == 9
486486
end
487-
487+
488488
@testset "distance" begin
489489
es = Flux.early_stopping(identity, 10; distance=(best_score, score) -> score - best_score)
490490

@@ -496,7 +496,7 @@ end
496496

497497
@test n_iter == 99
498498
end
499-
499+
500500
@testset "init_score" begin
501501
es = Flux.early_stopping(identity, 10; init_score=10)
502502

0 commit comments

Comments (0)