3 files changed, +12 −4 lines changed
First changed file (optimiser deprecation mappings):

@@ -130,8 +130,8 @@
 _old_to_new(rule::Optimiser) = Optimisers.OptimiserChain(map(_old_to_new, rule.os)...)
 const OptimiserChain = Optimise.Optimiser  # lets you use new name with implicit params too.
 _old_to_new(rule::WeightDecay) = Optimisers.WeightDecay(rule.wd)  # called gamma now
-_old_to_new(rule::ClipNorm) = Optimisers.ClipNorm(rule.thesh)  # called omega, and there are more fields
-_old_to_new(rule::ClipValue) = Optimisers.ClipGrad(rule.thesh)  # called delta now, and struct name differs
+_old_to_new(rule::ClipNorm) = Optimisers.ClipNorm(rule.thresh)  # called omega, and there are more fields
+_old_to_new(rule::ClipValue) = Optimisers.ClipGrad(rule.thresh)  # called delta now, and struct name differs
 const ClipGrad = Optimise.ClipValue
 _old_to_new(rule::RMSProp) = Optimisers.RMSProp(rule.eta, rule.rho, rule.epsilon)  # RMSProp has no field centred
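For context: these `_old_to_new` methods translate the old implicit-params rules from `Flux.Optimise` into their Optimisers.jl counterparts, and the typo meant the two clipping rules read a nonexistent `thesh` field. A minimal sketch of the corrected translation in isolation, assuming Flux 0.13 and Optimisers.jl with the field names the diff's comments mention (`thresh` on the old rule, `delta` on `Optimisers.ClipGrad`); the variable names are illustrative only:

    using Flux, Optimisers

    # Old-style rule (Flux <= 0.13) keeps its limit in the field `thresh`.
    old = Flux.Optimise.ClipValue(0.5f0)

    # The deprecation should hand that number to the new struct, which calls it `delta`.
    new = Optimisers.ClipGrad(old.thresh)
    @assert new.delta == old.thresh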
Second changed file (the Train module):

@@ -2,7 +2,7 @@ module Train
 
 using LinearAlgebra
 using Optimisers: Optimisers
-using Functors: fmap
+using Functors: fmap, fmapstructure
 
 import ..Flux.Optimise: train!, update!  # during 0.13, we add methods to the old functions
 
@@ -48,7 +48,8 @@ julia> opt_state  # mutated by Flux.train!
 """
 function setup(rule::Optimisers.AbstractRule, model)
   state = Optimisers.setup(rule, model)
-  fmap(model, exclude = Optimisers.isnumeric) do x
+  # This check only needs foreach; using fmap caused https://github.com/FluxML/Flux.jl/issues/2144
+  fmapstructure(model, exclude = Optimisers.isnumeric) do x
     Optimisers.maywrite(x) || error("""model must be fully mutable for `train!` to work, got `x::$(typeof(x))`.
                                        If `x .+= dx` is in fact ok, define `Optimisers.maywrite(::$(typeof(x))) = true`""")
   end
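The mutability check only needs to look at every numeric array; it never uses the rebuilt model that `fmap` returns, and that rebuilding step is what failed on `Embedding` in the linked issue. A rough sketch of the difference between the two traversals, assuming Functors.jl and Optimisers.jl; the `Dense` layer is only an illustration:

    using Flux, Optimisers
    using Functors: fmap, fmapstructure

    m = Dense(2 => 3)

    # fmap applies the function to each numeric leaf and then reconstructs the layer:
    rebuilt = fmap(x -> x, m; exclude = Optimisers.isnumeric)
    @assert rebuilt isa Dense   # a layer constructor was called

    # fmapstructure visits the same leaves but returns a plain nested structure,
    # so no constructor runs; that is all the maywrite check needs.
    walked = fmapstructure(x -> x, m; exclude = Optimisers.isnumeric)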
Third changed file (tests):

@@ -139,3 +139,10 @@
 @test diff1 ≈ diff3
 end
 
+@testset "Flux.setup bugs" begin
+  # https://github.com/FluxML/Flux.jl/issues/2144
+  @test Flux.setup(Flux.Adam(), Embedding(3 => 1)).weight isa Optimisers.Leaf
+  # Typo in 0.13.9's deprecation
+  @test Flux.setup(Flux.ClipValue(1), Dense(2 => 3)).weight.rule isa Optimisers.ClipGrad
+  @test Flux.setup(Flux.ClipNorm(1), Dense(2 => 3)).weight.rule isa Optimisers.ClipNorm
+end
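For reference, roughly what these assertions probe when run by hand, assuming a Flux build containing both fixes; the comments describe the intended results rather than captured REPL output:

    using Flux, Optimisers

    st = Flux.setup(Flux.ClipValue(1), Dense(2 => 3))
    st.weight.rule   # an Optimisers.ClipGrad once the deprecation reads `thresh` correctly

    st2 = Flux.setup(Flux.Adam(), Embedding(3 => 1))
    st2.weight       # an Optimisers.Leaf once setup walks with fmapstructure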