
Commit 11b7f39

Changed order of keys for more consistent code.
1 parent 42aa39a commit 11b7f39

File tree

src/derivatives/derivative.jl
src/derivatives/gradient.jl
src/derivatives/jacobian.jl

3 files changed, +19 -41 lines


src/derivatives/derivative.jl

Lines changed: 1 addition & 1 deletion
@@ -1,7 +1,7 @@
 """
     Derivative
 """
-abstract type Derivative{ST, OT, SDT} end
+abstract type Derivative{OT, SDT, ST <: AbstractSymbolicNeuralNetwork} end
 
 
 derivative(::DT) where {DT <: Derivative} = error("No method of function `derivative` defined for type $(DT).")
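To make the effect of the new parameter order concrete, here is a minimal standalone sketch; `DummyNetwork` and `DummyDerivative` are illustrative stand-ins, not package types. Because `ST` now carries the bound `ST <: AbstractSymbolicNeuralNetwork`, supplying anything but a symbolic network in the last slot fails at construction time:

```julia
# Standalone sketch; DummyNetwork and DummyDerivative are stand-ins,
# not types from SymbolicNeuralNetworks.jl.
abstract type AbstractSymbolicNeuralNetwork end

abstract type Derivative{OT, SDT, ST <: AbstractSymbolicNeuralNetwork} end

struct DummyNetwork <: AbstractSymbolicNeuralNetwork end

# Concrete subtypes mirror the new order: output, symbolic derivative, network.
struct DummyDerivative{OT, SDT, ST} <: Derivative{OT, SDT, ST}
    output::OT
    d::SDT
    nn::ST
end

DummyDerivative(:out, :dout, DummyNetwork())  # ok: the last field is a network
# DummyDerivative(:out, :dout, 1)             # TypeError: Int violates the bound on ST
```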

src/derivatives/gradient.jl

Lines changed: 7 additions & 29 deletions
@@ -15,35 +15,24 @@ Compute the symbolic output of `nn` and differentiate it with respect to the par
 
 # Examples
 
-```jldoctest
+```julia
 using SymbolicNeuralNetworks: SymbolicNeuralNetwork, Gradient, derivative
 using AbstractNeuralNetworks
 using Latexify: latexify
 
 c = Chain(Dense(2, 1, tanh))
 nn = SymbolicNeuralNetwork(c)
-(Gradient(nn) |> derivative)[1].L1.b |> latexify
-
-# output
-
-L"\begin{equation}
-\left[
-\begin{array}{c}
-1 - \tanh^{2}\left( \mathtt{b\_1}_{1} + \mathtt{W\_1}_{1,1} \mathtt{sinput}_{1} + \mathtt{W\_1}_{1,2} \mathtt{sinput}_{2} \right) \\
-\end{array}
-\right]
-\end{equation}
-"
+(Gradient(nn) |> derivative)[1].L1.b
 ```
 
 # Implementation
 
 Internally the constructors are using [`symbolic_pullback`](@ref).
 """
-struct Gradient{ST, OT, SDT} <: Derivative{ST, OT, SDT}
-    nn::ST
+struct Gradient{OT, SDT, ST} <: Derivative{OT, SDT, ST}
     output::OT
     ∇::SDT
+    nn::ST
 end
 
 """
@@ -71,7 +60,7 @@ derivative(g::Gradient) = g.∇
 
 function Gradient(output::EqT, nn::SymbolicNeuralNetwork)
     typeof(output) <: AbstractArray ? nothing : (@warn "You should only use `Gradient` together with array expressions! Maybe you wanted to use `SymbolicPullback`.")
-    Gradient(nn, output, symbolic_pullback(output, nn))
+    Gradient(output, symbolic_pullback(output, nn), nn)
 end
 
 function Gradient(nn::SymbolicNeuralNetwork)
@@ -87,7 +76,7 @@ This is used by [`Gradient`](@ref) and [`SymbolicPullback`](@ref).
 
 # Examples
 
-```jldoctest
+```julia
 using SymbolicNeuralNetworks: SymbolicNeuralNetwork, symbolic_pullback
 using AbstractNeuralNetworks
 using LinearAlgebra: norm
@@ -98,18 +87,7 @@ nn = SymbolicNeuralNetwork(c)
 output = c(nn.input, nn.params)
 spb = symbolic_pullback(output, nn)
 
-spb[1].L1.b |> latexify
-
-# output
-
-L"\begin{equation}
-\left[
-\begin{array}{c}
-1 - \tanh^{2}\left( \mathtt{b\_1}_{1} + \mathtt{W\_1}_{1,1} \mathtt{sinput}_{1} + \mathtt{W\_1}_{1,2} \mathtt{sinput}_{2} \right) \\
-\end{array}
-\right]
-\end{equation}
-"
+spb[1].L1.b
 ```
 """
 function symbolic_pullback(soutput::EqT, nn::AbstractSymbolicNeuralNetwork)::Union{AbstractArray{<:Union{NamedTuple, NeuralNetworkParameters}}, Union{NamedTuple, NeuralNetworkParameters}}
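For downstream code, the visible change here is the argument order of the positional `Gradient` constructor. A short usage sketch assembled from the updated docstrings above (not a verified doctest):

```julia
using SymbolicNeuralNetworks: SymbolicNeuralNetwork, Gradient, derivative, symbolic_pullback
using AbstractNeuralNetworks

c = Chain(Dense(2, 1, tanh))
nn = SymbolicNeuralNetwork(c)

# Convenience constructor: the call is unchanged.
g = Gradient(nn)
derivative(g)[1].L1.b        # symbolic gradient w.r.t. the bias of the first layer

# Positional constructor: the network now comes last, matching the new field order.
output = c(nn.input, nn.params)
g2 = Gradient(output, symbolic_pullback(output, nn), nn)
```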

src/derivatives/jacobian.jl

Lines changed: 11 additions & 11 deletions
@@ -71,28 +71,28 @@ jacobian1(input, ps) ≈ [analytic_jacobian(i, j) for j ∈ 1:output_dim, i ∈
 true
 ```
 """
-struct Jacobian{ST, OT, SDT} <: Derivative{ST, OT, SDT}
-    nn::ST
+struct Jacobian{OT, SDT, ST} <: Derivative{OT, SDT, ST}
     output::OT
     □::SDT
+    nn::ST
 end
 
 derivative(j::Jacobian) = j.□
 
-function Jacobian(nn::AbstractSymbolicNeuralNetwork)
-
-    # Evaluation of the symbolic output
-    soutput = nn.model(nn.input, nn.params)
-
-    Jacobian(soutput, nn)
-end
-
 function Jacobian(soutput::EqT, nn::AbstractSymbolicNeuralNetwork)
     # make differential
     Dx = symbolic_differentials(nn.input)
 
     # Evaluation of gradient
     s∇output = hcat([expand_derivatives.(Symbolics.scalarize(dx(soutput))) for dx in Dx]...)
 
-    Jacobian(nn, soutput, s∇output)
+    Jacobian(soutput, s∇output, nn)
+end
+
+function Jacobian(nn::AbstractSymbolicNeuralNetwork)
+
+    # Evaluation of the symbolic output
+    soutput = nn.model(nn.input, nn.params)
+
+    Jacobian(soutput, nn)
 end
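`Jacobian` follows the same pattern: the three-argument field constructor now ends with the network. A sketch of both constructor paths after the reordering, assuming the same setup as the `Gradient` example:

```julia
using SymbolicNeuralNetworks: SymbolicNeuralNetwork, Jacobian, derivative
using AbstractNeuralNetworks

c = Chain(Dense(2, 1, tanh))
nn = SymbolicNeuralNetwork(c)

# Computes the symbolic output internally, then delegates to the two-argument method.
j = Jacobian(nn)
derivative(j)                # the symbolic Jacobian stored in the □ field

# Equivalent, supplying the symbolic output explicitly.
soutput = nn.model(nn.input, nn.params)
j2 = Jacobian(soutput, nn)
```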
