Commit c16291f

Adjust docs & Flux.@functor for Functors.jl v0.5, plus misc. depwarns (#2509)
1 parent e2f58a8 commit c16291f

File tree: 2 files changed (src/deprecations.jl, src/optimise/train.jl), 52 additions & 7 deletions


src/deprecations.jl

Lines changed: 44 additions & 7 deletions
@@ -83,16 +83,48 @@ function params!(p::Zygote.Params, x, seen = IdSet())
     end
 end
 
+"""
+    params(model)
+
+Returns a `Zygote.Params` object containing all parameter arrays from the model.
+This is deprecated!
+
+This function was the cornerstone of how Flux used Zygote's implicit mode gradients,
+but since Flux 0.13 we use explicit mode `gradient(m -> loss(m, x, y), model)` instead.
+
+To collect all the parameter arrays for other purposes, use `Flux.trainables(model)`.
+"""
 function params(m...)
-  Base.depwarn("""
-    Flux.params(m...) is deprecated. Use `Flux.trainable(model)` for parameters' collection
-    and the explicit `gradient(m -> loss(m, x, y), model)` for gradient computation.
-    """, :params)
+  @warn """`Flux.params(m...)` is deprecated. Use `Flux.trainable(model)` for parameter collection,
+    and the explicit `gradient(m -> loss(m, x, y), model)` for gradient computation.""" maxlog=1
   ps = Params()
   params!(ps, m)
   return ps
 end
 
+
+"""
+    @functor MyLayer
+
+Flux used to require the use of `Functors.@functor` to mark any new layer-like struct.
+This allowed it to explore inside the struct, and update any trainable parameters within.
+Flux@0.15 removes this requirement. This is because Functors@0.5 changed its behaviour
+to be opt-out instead of opt-in. Arbitrary structs will now be explored without special marking.
+Hence calling `@functor` is no longer required.
+
+Calling `Flux.@layer MyLayer` is, however, still recommended. This adds various convenience methods
+for your layer type, such as pretty printing, and use with Adapt.jl.
+"""
+macro functor(ex)
+  @warn """The use of `Flux.@functor` is deprecated.
+    Most likely, you should write `Flux.@layer MyLayer` which will add various convenience methods for your type,
+    such as pretty-printing, and use with Adapt.jl.
+    However, this is not required. Flux.jl v0.15 uses Functors.jl v0.5, which makes exploration of most nested `struct`s
+    opt-out instead of opt-in... so Flux will automatically see inside any custom struct definitions.
+    """ maxlog=1
+  _layer_macro(ex)
+end
+
 # Allows caching of the parameters when params is called within gradient() to fix #2040.
 # @non_differentiable params(m...)  # https://github.com/FluxML/Flux.jl/pull/2054
 # That speeds up implicit use, and silently breaks explicit use.
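
For context, the migration these messages describe looks roughly like the sketch below, assuming Flux v0.15 with Functors.jl v0.5; `MyScale` and its loss function are hypothetical names used only for illustration.

using Flux

struct MyScale
    weight::Vector{Float32}
end
(m::MyScale)(x) = m.weight .* x

Flux.@layer MyScale        # replaces the old `Flux.@functor MyScale`; adds pretty printing etc.

model = MyScale(ones(Float32, 3))
x, y = rand(Float32, 3), rand(Float32, 3)
loss(m, x, y) = sum(abs2, m(x) .- y)

# Old, deprecated implicit style:
#   ps = Flux.params(model)
#   gs = gradient(() -> loss(model, x, y), ps)

# New explicit style:
ps = Flux.trainables(model)                      # collect all trainable parameter arrays
grad = Flux.gradient(m -> loss(m, x, y), model)  # gradient with respect to the model itself

Here `grad[1]` mirrors the structure of `model`, which is the form that `Flux.setup` and `Flux.update!` expect.
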
@@ -101,6 +133,14 @@ Zygote._pullback(::Zygote.Context{true}, ::typeof(params), m...) = params(m), _
 
 include("optimise/Optimise.jl") ## deprecated Module
 
+function Optimiser(rules...)
+  @warn "`Flux.Optimiser(...)` has been removed, please call `OptimiserChain(...)`, exported by Flux from Optimisers.jl" maxlog=1
+  OptimiserChain(rules...)
+end
+function ClipValue(val)
+  @warn "`Flux.ClipValue(...)` has been removed, please call `ClipGrad(...)`, exported by Flux from Optimisers.jl" maxlog=1
+  ClipGrad(val)
+end
 
 # TODO this friendly error should go in Optimisers.jl.
 # remove after https://github.com/FluxML/Optimisers.jl/pull/181
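
The two removed constructors above simply forward to their Optimisers.jl replacements. A rough before/after sketch, with an illustrative learning rate and clipping threshold:

using Flux

# Old (removed):
#   opt = Flux.Optimiser(Flux.ClipValue(1f0), Adam(1f-3))

# New, using the names exported by Flux from Optimisers.jl:
rule = OptimiserChain(ClipGrad(1f0), Adam(1f-3))

# For a model `m`, the rule is then attached with `opt_state = Flux.setup(rule, m)`.
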
@@ -119,9 +159,6 @@ end
 ### v0.16 deprecations ####################
 
 
-# Enable these when 0.16 is released, and delete const ClipGrad = Optimise.ClipValue etc:
-# Base.@deprecate_binding Optimiser OptimiserChain
-# Base.@deprecate_binding ClipValue ClipGrad
 
 # train!(loss::Function, ps::Zygote.Params, data, opt) = throw(ArgumentError(
 #   """On Flux 0.16, `train!` no longer accepts implicit `Zygote.Params`.

src/optimise/train.jl

Lines changed: 8 additions & 0 deletions
@@ -5,6 +5,10 @@ function update!(opt::AbstractOptimiser, x::AbstractArray, x̄)
 end
 
 function update!(opt::AbstractOptimiser, xs::Params, gs)
+  @warn """The method `Flux.update!(optimiser, ps::Params, grads)` is deprecated,
+    as part of Flux's move away from Zygote's implicit mode.
+    Please use explicit-style `update!(opt_state, model, grad)` instead,
+    where `grad = Flux.gradient(m -> loss(m,x,y), model)` and `opt_state = Flux.setup(rule, model)`.""" maxlog=1
   for x in xs
     isnothing(gs[x]) && continue
     update!(opt, x, gs[x])
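
A minimal sketch of the explicit-style `update!` that this warning recommends, assuming an arbitrary Flux model; the `Dense` layer, data, and loss here are placeholders:

using Flux

model = Dense(3 => 2)
x, y = rand(Float32, 3, 8), rand(Float32, 2, 8)
loss(m, x, y) = Flux.mse(m(x), y)

opt_state = Flux.setup(Adam(1f-3), model)           # replaces constructing an AbstractOptimiser
grad = Flux.gradient(m -> loss(m, x, y), model)[1]  # explicit gradient, structured like `model`
Flux.update!(opt_state, model, grad)                # mutates both `model` and `opt_state` in place
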
@@ -21,6 +25,10 @@ batchmemaybe(x) = tuple(x)
 batchmemaybe(x::Tuple) = x
 
 function train!(loss, ps::Params, data, opt::AbstractOptimiser; cb = () -> ())
+  @warn """The method `Flux.train!(loss2, ps::Params, data, optimiser)` is deprecated,
+    as part of Flux's move away from Zygote's implicit parameters.
+    Please use explicit-style `train!(loss, model, data, opt_state)` instead,
+    where `loss(m, xy...)` accepts the model, and `opt_state = Flux.setup(rule, model)`.""" maxlog=1
   cb = runall(cb)
   itrsz = Base.IteratorSize(typeof(data))
   n = (itrsz == Base.HasLength()) || (itrsz == Base.HasShape{1}()) ? length(data) : 0
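
And a corresponding sketch of the explicit-style `train!` loop, again with placeholder model and data; the key change is that the loss now takes the model as its first argument instead of closing over implicit `Params`:

using Flux

model = Dense(3 => 1)
data = [(rand(Float32, 3, 8), rand(Float32, 1, 8)) for _ in 1:10]  # iterable of (x, y) batches
opt_state = Flux.setup(Descent(0.01), model)

Flux.train!((m, x, y) -> Flux.mse(m(x), y), model, data, opt_state)
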
