
Commit eb6492c

More lazy strings (#2402)
1 parent a8737d7 commit eb6492c

File tree: 7 files changed, +13 −13 lines changed

src/functor.jl

Lines changed: 2 additions & 2 deletions

@@ -64,12 +64,12 @@ Possible values of `inactive` are:
 """
 function testmode!(m, mode)
   inactive = if mode isa Symbol
-    mode === :auto || throw(ArgumentError("testmode! accepts only the symbol :auto, got :$mode"))
+    mode === :auto || throw(ArgumentError(lazy"testmode! accepts only the symbol :auto, got :$mode"))
     nothing
   elseif mode isa Union{Bool,Nothing}
     mode
   else
-    throw(ArgumentError("testmode! does not accept $(repr(mode)) as the 2nd argument"))
+    throw(ArgumentError(lazy"testmode! does not accept $(repr(mode)) as the 2nd argument"))
   end
   foreach(x -> testmode!(x, inactive), trainable(m))
   m
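For reference, a minimal standalone sketch of what `lazy"..."` does (plain Julia ≥ 1.8, not part of this commit; the `Expensive` type is invented for illustration): the macro builds a `Base.LazyString`, which captures the interpolated values immediately but only renders them into a `String` when the message is actually read, for example when `showerror` prints the exception.

# Hypothetical illustration of lazy"..." (Base.LazyString, Julia >= 1.8); not Flux code.
struct Expensive end
Base.show(io::IO, ::Expensive) = (println("  rendering Expensive()"); print(io, "Expensive()"))

msg = lazy"testmode! does not accept $(Expensive()) as the 2nd argument"
println(typeof(msg))             # LazyString; nothing has been rendered yet
err = ArgumentError(msg)         # constructing the exception still renders nothing
println(sprint(showerror, err))  # only here does show(::Expensive) actually run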

src/layers/basic.jl

Lines changed: 3 additions & 3 deletions

@@ -75,7 +75,7 @@ function Base.show(io::IO, c::Chain)
 end
 
 _show_layers(io, layers::Tuple) = join(io, layers, ", ")
-_show_layers(io, layers::NamedTuple) = join(io, ["$k = $v" for (k, v) in pairs(layers)], ", ")
+_show_layers(io, layers::NamedTuple) = join(io, [lazy"$k = $v" for (k, v) in pairs(layers)], ", ")
 _show_layers(io, layers::AbstractVector) = (print(io, "["); join(io, layers, ", "); print(io, "]"))
 
 # This is a temporary and naive implementation
@@ -531,7 +531,7 @@ function _parallel_check(layers, xs)
   nl = length(layers)
   nx = length(xs)
   if (nl != nx)
-    throw(ArgumentError("Parallel with $nl sub-layers can take one input or $nl inputs, but got $nx inputs"))
+    throw(ArgumentError(lazy"Parallel with $nl sub-layers can take one input or $nl inputs, but got $nx inputs"))
   end
 end
 ChainRulesCore.@non_differentiable _parallel_check(nl, nx)
@@ -616,7 +616,7 @@ function _pairwise_check(x, layers, T)
   lx = length(x)
   N = length(layers)
   if T <: Tuple && lx != N
-    throw(ArgumentError("PairwiseFusion with $N sub-layers can take one input or $N inputs, but got $lx inputs"))
+    throw(ArgumentError(lazy"PairwiseFusion with $N sub-layers can take one input or $N inputs, but got $lx inputs"))
   end
 end
 ChainRulesCore.@non_differentiable _pairwise_check(lx, N, T)
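A small usage sketch of the `_show_layers` change (plain Julia, with made-up layer names standing in for real layer objects): a `LazyString` prints like an ordinary string, so the comprehension can be fed straight to `join`.

# Made-up example data; Flux would pass actual layer objects here.
layers = (dense = "Dense(2 => 3)", act = "relu")

io = IOBuffer()
join(io, [lazy"$k = $v" for (k, v) in pairs(layers)], ", ")
println(String(take!(io)))   # dense = Dense(2 => 3), act = relu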

src/layers/macro.jl

Lines changed: 2 additions & 2 deletions

@@ -70,7 +70,7 @@ macro layer(exs...)
 
   for j in 1:length(rest)
     ex = rest[j]
-    Meta.isexpr(ex, :(=)) || error("The macro `@layer` expects here `keyword = (fields...,)`, got $ex")
+    Meta.isexpr(ex, :(=)) || error("The macro `@layer` expects here `keyword = (fields...,)`, got ", ex)
 
     name = if ex.args[1] == :trainable
       :(Optimisers.trainable)
@@ -153,4 +153,4 @@ _macro_trainable(type, fun, field::Union{Symbol,QuoteNode}) = _macro_trainable(t
 
 _noquotenode(s::Symbol) = s
 _noquotenode(q::QuoteNode) = q.value  # lets you write trainable=(:x,:y) instead of (x,y)
-_noquotenode(ex) = error("expected a symbol here, as a field name, but got $ex")
+_noquotenode(ex) = error("expected a symbol here, as a field name, but got ", ex)
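The two hunks above use a related trick: `error(args...)` concatenates its arguments with `string(...)` inside `error` itself, which keeps the string construction out of the calling method. A hedged standalone sketch, with an invented expression:

# error(a, b, ...) builds its message via string(a, b, ...) only when it throws.
ex = :(trainable = (:weight, :bias))   # made-up expression for illustration
try
    error("The macro `@layer` expects here `keyword = (fields...,)`, got ", ex)
catch e
    println(e.msg)   # ... got trainable = (:weight, :bias)
end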

src/layers/normalise.jl

Lines changed: 3 additions & 3 deletions

@@ -4,7 +4,7 @@ _isactive(m, x) = isnothing(m.active) ? NNlib.within_gradient(x) : m.active
 # Internal function, used only in this file.
 _tidy_active(mode::Bool) = mode
 _tidy_active(::Nothing) = nothing
-_tidy_active(mode) = mode === :auto ? nothing : throw(ArgumentError("active = $(repr(mode)) is not accepted, must be true/false/nothing or :auto"))
+_tidy_active(mode) = mode === :auto ? nothing : throw(ArgumentError(lazy"active = $(repr(mode)) is not accepted, must be true/false/nothing or :auto"))
 
 """
     Dropout(p; [dims, rng, active])
@@ -74,7 +74,7 @@ end
 Dropout(p::Real, dims, active) = Dropout(p, dims, active, default_rng())
 
 function Dropout(p::Real; dims=:, active::Union{Bool,Nothing} = nothing, rng = default_rng())
-  0 ≤ p ≤ 1 || throw(ArgumentError("Dropout expects 0 ≤ p ≤ 1, got p = $p"))
+  0 ≤ p ≤ 1 || throw(ArgumentError(lazy"Dropout expects 0 ≤ p ≤ 1, got p = $p"))
   Dropout(p, dims, active, rng)
 end
 
@@ -126,7 +126,7 @@ end
 
 AlphaDropout(p, active) = AlphaDropout(p, active, default_rng())
 function AlphaDropout(p; rng = default_rng(), active::Union{Bool,Nothing} = nothing)
-  0 ≤ p ≤ 1 || throw(ArgumentError("AlphaDropout expects 0 ≤ p ≤ 1, got p = $p"))
+  0 ≤ p ≤ 1 || throw(ArgumentError(lazy"AlphaDropout expects 0 ≤ p ≤ 1, got p = $p"))
   AlphaDropout(p, active, rng)
 end
 
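To make the guard pattern concrete, here is a standalone sketch mirroring the `Dropout`/`AlphaDropout` checks above (a hypothetical `check_prob` helper, not part of Flux): the `ArgumentError`, and with it the lazy message, is only constructed on the failing branch, and the message text is only rendered when the error is displayed.

# Hypothetical helper mirroring the 0 ≤ p ≤ 1 guard above; not Flux API.
function check_prob(p::Real)
    0 <= p <= 1 || throw(ArgumentError(lazy"expected 0 ≤ p ≤ 1, got p = $p"))
    return p
end

check_prob(0.5)        # passes; no error and no message are ever built
try
    check_prob(1.5)
catch err
    println(sprint(showerror, err))   # ArgumentError: expected 0 ≤ p ≤ 1, got p = 1.5
end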

src/optimise/train.jl

Lines changed: 1 addition & 1 deletion

@@ -92,7 +92,7 @@ function train!(loss, ps::Params, data, opt::AbstractOptimiser; cb = () -> ())
       loss(batchmemaybe(d)...)
     end
     if !isfinite(l)
-      throw(DomainError("Loss is $l on data item $i, stopping training"))
+      throw(DomainError(lazy"Loss is $l on data item $i, stopping training"))
     end
     update!(opt, ps, gs)
     cb()
src/outputsize.jl

Lines changed: 1 addition & 1 deletion

@@ -230,7 +230,7 @@ end
 function _makelazy(ex::Expr)
   n = _underscoredepth(ex)
   n == 0 && return ex
-  n == 1 && error("@autosize doesn't expect an underscore here: $ex")
+  n == 1 && error("@autosize doesn't expect an underscore here: ", ex)
   n == 2 && return :($LazyLayer($(string(ex)), $(_makefun(ex)), nothing))
   n > 2 && return Expr(ex.head, map(_makelazy, ex.args)...)
 end

src/train.jl

Lines changed: 1 addition & 1 deletion

@@ -106,7 +106,7 @@ function train!(loss, model, data, opt; cb = nothing)
     d_splat = d isa Tuple ? d : (d,)
     l, gs = Zygote.withgradient(m -> loss(m, d_splat...), model)
     if !isfinite(l)
-      throw(DomainError("Loss is $l on data item $i, stopping training"))
+      throw(DomainError(lazy"Loss is $l on data item $i, stopping training"))
     end
     opt, model = Optimisers.update!(opt, model, gs[1])
     @logprogress Base.haslength(data) ? i/length(data) : nothing
