Skip to content

Commit 0adcf55

Browse files
authored
Merge pull request #25 from JuliaGNI/flip-arguments-in-jacobian
Flip keys in jacobian
2 parents 15a8c6e + 6bbd0bf commit 0adcf55

File tree

4 files changed

+20
-20
lines changed

Project.toml

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -3,16 +3,16 @@ uuid = "aed23131-dcd0-47ca-8090-d21e605652e3"
33
authors = ["Michael Kraus"]
44
version = "0.2.0"
55

6-
[weakdeps]
7-
SymbolicUtils = "d1185830-fcd6-423d-90d6-eec64667417b"
8-
96
[deps]
107
AbstractNeuralNetworks = "60874f82-5ada-4c70-bd1c-fa6be7711c8a"
118
Latexify = "23fbe1c1-3f47-55db-b15f-69d7ec21a316"
129
LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
1310
RuntimeGeneratedFunctions = "7e49a35a-f44a-4d26-94aa-eba1b4ca6b47"
1411
Symbolics = "0c5d862f-8b57-4792-8d23-62f2024744c7"
1512

13+
[weakdeps]
14+
SymbolicUtils = "d1185830-fcd6-423d-90d6-eec64667417b"
15+
1616
[compat]
1717
AbstractNeuralNetworks = "0.3, 0.4, 0.5, 0.6"
1818
Documenter = "1.8.0"
@@ -28,12 +28,12 @@ Zygote = "0.6.73"
2828
[extras]
2929
Documenter = "e30172f5-a6a5-5a46-863b-614d45cd2de4"
3030
ForwardDiff = "f6369f11-7733-5829-9624-2563aa707210"
31+
GeometricMachineLearning = "194d25b2-d3f5-49f0-af24-c124f4aa80cc"
3132
Latexify = "23fbe1c1-3f47-55db-b15f-69d7ec21a316"
3233
Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c"
3334
SafeTestsets = "1bc83da4-3b8d-516f-aca4-4fe02f6d838f"
3435
Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
3536
Zygote = "e88e6eb3-aa80-5325-afca-941959d7151f"
36-
GeometricMachineLearning = "194d25b2-d3f5-49f0-af24-c124f4aa80cc"
3737

3838
[targets]
3939
test = ["Test", "ForwardDiff", "Random", "Documenter", "Latexify", "SafeTestsets", "Zygote", "GeometricMachineLearning"]

src/derivatives/derivative.jl

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
"""
22
Derivative
33
"""
4-
abstract type Derivative{ST, OT, SDT} end
4+
abstract type Derivative{OT, SDT, ST <: AbstractSymbolicNeuralNetwork} end
55

66
derivative(::DT) where {DT <: Derivative} = error("No method of function `derivative` defined for type $(DT).")
77

src/derivatives/gradient.jl

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -28,10 +28,10 @@ nn = SymbolicNeuralNetwork(c)
2828
2929
Internally the constructors are using [`symbolic_pullback`](@ref).
3030
"""
31-
struct Gradient{ST, OT, SDT} <: Derivative{ST, OT, SDT}
32-
nn::ST
31+
struct Gradient{OT, SDT, ST} <: Derivative{OT, SDT, ST}
3332
output::OT
3433
::SDT
34+
nn::ST
3535
end
3636

3737
"""
@@ -59,7 +59,7 @@ derivative(g::Gradient) = g.∇
5959

6060
function Gradient(output::EqT, nn::SymbolicNeuralNetwork)
6161
typeof(output) <: AbstractArray ? nothing : (@warn "You should only use `Gradient` together with array expressions! Maybe you wanted to use `SymbolicPullback`.")
62-
Gradient(nn, output, symbolic_pullback(output, nn))
62+
Gradient(output, symbolic_pullback(output, nn), nn)
6363
end
6464

6565
function Gradient(nn::SymbolicNeuralNetwork)
@@ -87,7 +87,7 @@ nn = SymbolicNeuralNetwork(c)
8787
output = c(nn.input, params(nn))
8888
spb = symbolic_pullback(output, nn)
8989
90-
spb[1].L1.b |> latexify
90+
spb[1].L1.b
9191
```
9292
"""
9393
function symbolic_pullback(soutput::EqT, nn::AbstractSymbolicNeuralNetwork)::Union{AbstractArray{<:Union{NamedTuple, NeuralNetworkParameters}}, Union{NamedTuple, NeuralNetworkParameters}}

src/derivatives/jacobian.jl

Lines changed: 11 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -71,28 +71,28 @@ jacobian1(input, ps) ≈ [analytic_jacobian(i, j) for j ∈ 1:output_dim, i ∈
7171
true
7272
```
7373
"""
74-
struct Jacobian{ST, OT, SDT} <: Derivative{ST, OT, SDT}
75-
nn::ST
74+
struct Jacobian{OT, SDT, ST} <: Derivative{OT, SDT, ST}
7675
output::OT
7776
::SDT
77+
nn::ST
7878
end
7979

8080
derivative(j::Jacobian) = j.□
8181

82-
function Jacobian(nn::AbstractSymbolicNeuralNetwork)
83-
84-
# Evaluation of the symbolic output
85-
soutput = nn.model(nn.input, params(nn))
86-
87-
Jacobian(soutput, nn)
88-
end
89-
9082
function Jacobian(soutput::EqT, nn::AbstractSymbolicNeuralNetwork)
9183
# make differential
9284
Dx = symbolic_differentials(nn.input)
9385

9486
# Evaluation of gradient
9587
s∇output = hcat([expand_derivatives.(Symbolics.scalarize(dx(soutput))) for dx in Dx]...)
9688

97-
Jacobian(nn, soutput, s∇output)
89+
Jacobian(soutput, s∇output, nn)
90+
end
91+
92+
function Jacobian(nn::AbstractSymbolicNeuralNetwork)
93+
94+
# Evaluation of the symbolic output
95+
soutput = nn.model(nn.input, params(nn))
96+
97+
Jacobian(soutput, nn)
9898
end

0 commit comments

Comments
 (0)