@@ -83,16 +83,48 @@ function params!(p::Zygote.Params, x, seen = IdSet())
83
83
end
84
84
end
85
85
86
"""
    params(model)

Returns a `Zygote.Params` object containing all parameter arrays from the model.
This is deprecated!

This function was the cornerstone of how Flux used Zygote's implicit mode gradients,
but since Flux 0.13 we use explicit mode `gradient(m -> loss(m, x, y), model)` instead.

To collect all the parameter arrays for other purposes, use `Flux.trainables(model)`.
"""
function params(m...)
  # One-time deprecation notice. The message recommends `trainables` (which collects
  # parameter arrays), matching the docstring above — `trainable(model)` is a different
  # API that returns a NamedTuple of a layer's trainable fields, not the collection.
  @warn """`Flux.params(m...)` is deprecated. Use `Flux.trainables(model)` for parameter collection,
    and the explicit `gradient(m -> loss(m, x, y), model)` for gradient computation.""" maxlog=1
  # Fall back to the old implicit-parameter path so existing callers keep working.
  ps = Params()
  params!(ps, m)
  return ps
end
95
104
105
+
106
"""
    @functor MyLayer

Flux used to require the use of `Functors.@functor` to mark any new layer-like struct.
This allowed it to explore inside the struct, and update any trainable parameters within.
Flux@0.15 removes this requirement. This is because Functors@0.5 changed its behaviour
to be opt-out instead of opt-in. Arbitrary structs will now be explored without special marking.
Hence calling `@functor` is no longer required.

Calling `Flux.@layer MyLayer` is, however, still recommended. This adds various convenience methods
for your layer type, such as pretty printing, and use with Adapt.jl.
"""
macro functor(ex)
  # Deprecation shim: warn once per session, then delegate to the same machinery
  # as `Flux.@layer`, so existing `@functor` call sites keep working unchanged.
  @warn """The use of `Flux.@functor` is deprecated.
    Most likely, you should write `Flux.@layer MyLayer` which will add various convenience methods for your type,
    such as pretty-printing, and use with Adapt.jl.
    However, this is not required. Flux.jl v0.15 uses Functors.jl v0.5, which makes exploration of most nested `struct`s
    opt-out instead of opt-in... so Flux will automatically see inside any custom struct definitions.
    """ maxlog=1
  # NOTE(review): `_layer_macro` is defined elsewhere in Flux — presumably the
  # implementation behind `@layer`; confirm it accepts a bare type expression.
  _layer_macro(ex)
end
127
+
96
128
# Allows caching of the parameters when params is called within gradient() to fix #2040.
97
129
# @non_differentiable params(m...) # https://github.com/FluxML/Flux.jl/pull/2054
98
130
# That speeds up implicit use, and silently breaks explicit use.
@@ -101,6 +133,14 @@ Zygote._pullback(::Zygote.Context{true}, ::typeof(params), m...) = params(m), _
101
133
102
134
include (" optimise/Optimise.jl" ) # # deprecated Module
103
135
136
"""
    Optimiser(rules...)

Removed alias kept only as a deprecation shim: emits a one-time warning and then
forwards all rules, in order, to `OptimiserChain(rules...)`.
"""
function Optimiser(rules...)
  # Warn only once per session (maxlog=1); behaviour is otherwise identical.
  @warn "`Flux.Optimiser(...)` has been removed, please call `OptimiserChain(...)`, exported by Flux from Optimisers.jl" maxlog=1
  chain = OptimiserChain(rules...)
  return chain
end
140
"""
    ClipValue(val)

Removed alias kept only as a deprecation shim: emits a one-time warning and then
constructs the equivalent `ClipGrad(val)` rule.
"""
function ClipValue(val)
  # Warn only once per session (maxlog=1); behaviour is otherwise identical.
  @warn "`Flux.ClipValue(...)` has been removed, please call `ClipGrad(...)`, exported by Flux from Optimisers.jl" maxlog=1
  rule = ClipGrad(val)
  return rule
end
104
144
105
145
# TODO this friendly error should go in Optimisers.jl.
106
146
# remove after https://github.com/FluxML/Optimisers.jl/pull/181
119
159
# ## v0.16 deprecations ####################
120
160
121
161
122
- # Enable these when 0.16 is released, and delete const ClipGrad = Optimise.ClipValue etc:
123
- # Base.@deprecate_binding Optimiser OptimiserChain
124
- # Base.@deprecate_binding ClipValue ClipGrad
125
162
126
163
# train!(loss::Function, ps::Zygote.Params, data, opt) = throw(ArgumentError(
127
164
# """On Flux 0.16, `train!` no longer accepts implicit `Zygote.Params`.
0 commit comments