@layer :expand Chain
@layer BatchNorm trainable=(β,γ)
@layer Struct children=(α,β) trainable=(β,)

-
- This macro replaces most uses of `@functor` in Flux 0.14. Its basic purpose is the same:
+
+ This macro replaces most uses of `@functor`. Its basic purpose is the same:
When you define a new layer, this tells Flux to explore inside it
to see the parameters it trains, and also to move them to the GPU, change precision, etc.
Like `@functor`, this assumes your struct has the default constructor, to enable re-building.

- Some "keywords" allow control of the recursion:
+ Some "keywords" allow control of the recursion.
* If some fields look like parameters but should not be trained,
  then `trainable` lets you specify fields to include, and ignore the rest.
* You can likewise add restrictions to Functors.jl's `children` (although this is seldom a good idea).
+ The default for both is `fieldnames(T)`; any tuples you supply must be subsets of this, and `trainable` must be a subset of `children` (see the sketch below).
+
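For instance, freezing one field while still letting `gpu` or `f32` handle it might look like this (a minimal sketch, using a hypothetical `Scale` struct):

```julia
using Flux

struct Scale
    weight::Vector{Float32}   # updated by the optimiser
    frozen::Vector{Float32}   # moved to GPU / converted, but never trained
end

Flux.@layer Scale trainable=(weight,)

# Flux.trainable(Scale([1f0, 2f0], [0f0, 0f0])) now returns a NamedTuple
# containing only `weight`, while Functors still recurses into both fields.
```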
It also handles overloads of `show` for pretty printing.
* By default, it adds methods to 3-arg `Base.show` to treat your layer much like `Dense` or `Conv`.
* If your layer is a container, more like `Chain` or `Parallel`, then `:expand` makes `show` unfold its contents.
* To disable all `show` overloads, there is an `:ignore` option too (a sketch follows below).

+ Note that re-running the macro with different options does not overwrite all methods; you will need to restart Julia.
(You probably still want to define 2-arg `show(io::IO, x::Layer)`, the macro does not touch this.)

- Note that re-running the macro with different options does not overwrite all methods, you will need to restart.
-
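For example, choosing a printing style might look like this (a sketch, with a hypothetical `MyWrapper` container type):

```julia
using Flux

struct MyWrapper; layers; end   # hypothetical container type

Flux.@layer :expand MyWrapper   # unfold contents when printed, like `Chain`
# or instead:
# Flux.@layer :ignore MyWrapper # keep Julia's default printing; functor/trainable methods are still defined
```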
# Example
```jldoctest
julia> struct Trio; a; b; c end

- julia> tri = Trio(Dense([1.1 2.2],), Dense([3.3;;], false), Dropout(0.4))
- Trio(Dense(1 => 1, tanh), Dense(1 => 1; bias=false), Dropout(0.4))
+ julia> tri = Trio(Dense([1.1 2.2], [0.0], tanh), Dense([3.3;;], false), Dropout(0.4))
+ Trio(Dense(2 => 1, tanh), Dense(1 => 1; bias=false), Dropout(0.4))

- julia> Flux.destructure(tri)  # parameters not visible to Flux
+ julia> Flux.destructure(tri)  # parameters are not yet visible to Flux
(Bool[], Restructure(Trio, ..., 0))

julia> Flux.@layer :expand Trio

julia> Flux.destructure(tri)  # now gpu, train!, etc will see inside too
([1.1, 2.2, 0.0, 3.3], Restructure(Trio, ..., 4))

- julia> tri
+ julia> tri  # and layer is printed like Chain
Trio(
  Dense(2 => 1),              # 3 parameters
  Dense(1 => 1; bias=false),  # 1 parameters
@@ -58,7 +59,7 @@ macro layer(exs...)
  elseif exs[1] == QuoteNode(:ignore)
    exs[2:end]
  elseif exs[1] isa QuoteNode
-   error("before the type, only accepted options are `:expand` and `:ignore`")
+   error("`@layer` accepts only two options before the layer type, `:expand` and `:ignore` (to control `show`)")
  else
    push!(out.args, _macro_layer_show(esc(exs[1])))
    exs
@@ -76,12 +77,14 @@ macro layer(exs...)
  for j in 1:length(rest)
    j == i && continue
    ex = rest[j]
-   Meta.isexpr(ex, :(=)) || error("expected keyword = fields")
+   Meta.isexpr(ex, :(=)) || error("The macro `@layer` expects `keyword = (fields...,)` here, got $ex")

    name = if ex.args[1] == :trainable
      :(Optimisers.trainable)
+   elseif ex.args[1] == :functor
+     error("Can't use `functor=(...)` as a keyword to `@layer`. Use `children=(...)` to define a method for `functor`.")
    else
-     @warn "trying to define a method for `$(ex.args[1])` in your scope... this is experimental" maxlog=1
+     @warn "Trying to define a method for `$(ex.args[1])` in your scope... this is experimental" maxlog=1
      esc(ex.args[1])
    end
    push!(out.args, _macro_trainable(esc(type), name, ex.args[2]))
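    # each keyword thus pushes one generated method definition (a `quote` block) into the macro's output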

function _check_new_macro(x::T) where T
  Functors.isleaf(x) && return
- @warn "This type should now use Flux.@layer instead of @functor" T maxlog=1 _id=hash(T)
+ Base.depwarn("This type should probably now use `Flux.@layer` instead of `@functor`: $T", Symbol("@functor"))
end
_check_new_macro(::Tuple) = nothing   # defined by Functors.jl, not by users
_check_new_macro(::NamedTuple) = nothing
@@ -159,11 +162,10 @@ function _macro_trainable(type, fun, fields)
  gets = [:(getfield(x, $f)) for f in quoted]
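  # e.g. `@layer Struct trainable=(β,)` generates, roughly:
  #   Optimisers.trainable(x::Struct) = NamedTuple{(:β,)}((getfield(x, :β),))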
  quote
    $fun(x::$type) = NamedTuple{$symbols}(($(gets...),))
-   # Flux.trainable(x::$type) = NamedTuple{$symbols}(($(gets...),)) # ?? scope is weird
  end
end
_macro_trainable(type, fun, field::Union{Symbol,QuoteNode}) = _macro_trainable(type, fun, :(($field,)))   # lets you forget a comma
_noquotenode(s::Symbol) = s
_noquotenode(q::QuoteNode) = q.value   # lets you write trainable=(:x,:y) instead of (x,y)
- _noquotenode(ex) = error("expected a symbol, got $ex")
+ _noquotenode(ex) = error("expected a symbol here, as a field name, but got $ex")