Commit 511085a

Changed keyword output -> f and fixed SymbolicUtils version.

1 parent 26565f0 commit 511085a

5 files changed: +45 −32 lines

Project.toml

Lines changed: 5 additions & 1 deletion
@@ -10,16 +10,20 @@ LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
 RuntimeGeneratedFunctions = "7e49a35a-f44a-4d26-94aa-eba1b4ca6b47"
 Symbolics = "0c5d862f-8b57-4792-8d23-62f2024744c7"
 
+[weakdeps]
+SymbolicUtils = "d1185830-fcd6-423d-90d6-eec64667417b"
+
 [compat]
 AbstractNeuralNetworks = "0.3, 0.4, 0.5"
 Documenter = "1.8.0"
 ForwardDiff = "0.10.38"
 GeometricMachineLearning = "0.3.7"
 Latexify = "0.16.5"
 RuntimeGeneratedFunctions = "0.5"
+SymbolicUtils = "<3.8.0"
 Symbolics = "5, 6"
 Zygote = "0.6.73"
-julia = "1.6"
+julia = "1.10"
 
 [extras]
 Documenter = "e30172f5-a6a5-5a46-863b-614d45cd2de4"

src/derivatives/derivative.jl

Lines changed: 7 additions & 7 deletions
@@ -1,7 +1,7 @@
 """
     Derivative
 """
-abstract type Derivative{ST, OT, SDT} end
+abstract type Derivative{ST, FT, SDT} end
 
 derivative(::DT) where {DT <: Derivative} = error("No method of function `derivative` defined for type $(DT).")
 
@@ -19,16 +19,16 @@ function symbolic_differentials(sparams::NeuralNetworkParameters)
     NeuralNetworkParameters{keys(sparams)}(vals)
 end
 
-function symbolic_derivative(soutput, Dx::AbstractArray)
-    [expand_derivatives(Symbolics.scalarize(dx(soutput))) for dx in Dx]
+function symbolic_derivative(f, Dx::AbstractArray)
+    [expand_derivatives(Symbolics.scalarize(dx(f))) for dx in Dx]
 end
 
-function symbolic_derivative(soutput, dps::NamedTuple)
-    gradient_values = (symbolic_derivative(soutput, dps[key]) for key in keys(dps))
+function symbolic_derivative(f, dps::NamedTuple)
+    gradient_values = (symbolic_derivative(f, dps[key]) for key in keys(dps))
     NamedTuple{keys(dps)}(gradient_values)
 end
 
-function symbolic_derivative(soutput, dps::NeuralNetworkParameters)
-    vals = Tuple(symbolic_derivative(soutput, dp) for dp in values(dps))
+function symbolic_derivative(f, dps::NeuralNetworkParameters)
+    vals = Tuple(symbolic_derivative(f, dp) for dp in values(dps))
     NeuralNetworkParameters{keys(dps)}(vals)
 end
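
To see what the renamed `symbolic_derivative(f, Dx::AbstractArray)` method computes, here is a hedged standalone sketch using plain Symbolics; the variables `a`, `b`, `x` and the expression `f` are invented for illustration:

```julia
using Symbolics

@variables a b x
f = a * x^2 + b                           # symbolic expression in the "parameters" a and b
Da, Db = Differential(a), Differential(b)

# Mirrors the array method: apply each differential to f and simplify.
derivs = [expand_derivatives(d(f)) for d in [Da, Db]]   # -> [x^2, 1]
```

The `NamedTuple` and `NeuralNetworkParameters` methods simply recurse over their values and rewrap the results under the same keys.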

src/derivatives/gradient.jl

Lines changed: 16 additions & 10 deletions
@@ -5,13 +5,17 @@ Computes and stores the gradient of a symbolic function with respect to the parameters
 
 # Constructors
 
-    Gradient(output, nn)
+    Gradient(f, nn)
 
-Differentiate the symbolic `output` with respect to the parameters of `nn`.
+Differentiate the symbolic `f` with respect to the parameters of `nn`.
 
     Gradient(nn)
 
-Compute the symbolic output of `nn` and differentiate it with respect to the parameters of `nn`.
+Compute the symbolic output of `nn` and differentiate it with respect to the parameters of `nn`. This does:
+
+```julia
+nn.model(nn.input, params(nn))
+```
 
 # Examples
 
@@ -40,9 +44,9 @@ L"\begin{equation}
 
 Internally the constructors are using [`symbolic_pullback`](@ref).
 """
-struct Gradient{ST, OT, SDT} <: Derivative{ST, OT, SDT}
+struct Gradient{ST, FT, SDT} <: Derivative{ST, FT, SDT}
     nn::ST
-    output::OT
+    f::FT
     □::SDT
 end
 
@@ -51,6 +55,8 @@ end
 
 # Examples
 
+We compare this to [`symbolic_pullback`](@ref) here:
+
 ```jldoctest
 using SymbolicNeuralNetworks: SymbolicNeuralNetwork, Gradient, derivative, symbolic_pullback
 using AbstractNeuralNetworks
@@ -60,7 +66,7 @@ nn = SymbolicNeuralNetwork(c)
 g = Gradient(nn)
 ∇ = derivative(g)
 
-isequal(∇, symbolic_pullback(g.output, nn))
+isequal(∇, symbolic_pullback(g.f, nn))
 
 # output
 
@@ -79,9 +85,9 @@ function Gradient(nn::SymbolicNeuralNetwork)
 end
 
 @doc raw"""
-    symbolic_pullback(nn, output)
+    symbolic_pullback(f, nn)
 
-This takes a symbolic output that depends on the parameters in `nn` and returns the corresponding pullback (a symbolic expression).
+This takes a symbolic `f` that depends on the parameters in `nn` and returns the corresponding pullback (a symbolic expression).
 
 This is used by [`Gradient`](@ref) and [`SymbolicPullback`](@ref).
 
@@ -113,7 +119,7 @@ L"\begin{equation}
 "
 ```
 """
-function symbolic_pullback(soutput::EqT, nn::AbstractSymbolicNeuralNetwork)::Union{AbstractArray{<:Union{NamedTuple, NeuralNetworkParameters}}, Union{NamedTuple, NeuralNetworkParameters}}
+function symbolic_pullback(f::EqT, nn::AbstractSymbolicNeuralNetwork)::Union{AbstractArray{<:Union{NamedTuple, NeuralNetworkParameters}}, Union{NamedTuple, NeuralNetworkParameters}}
     symbolic_diffs = symbolic_differentials(params(nn))
-    [symbolic_derivative(soutput_single, symbolic_diffs) for soutput_single ∈ soutput]
+    [symbolic_derivative(f_single, symbolic_diffs) for f_single ∈ f]
 end
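
As a quick orientation for the rename, a hedged sketch of the post-commit `Gradient` API (the single-layer chain is invented, mirroring the docstring's jldoctest setup):

```julia
using AbstractNeuralNetworks
using SymbolicNeuralNetworks: SymbolicNeuralNetwork, Gradient, derivative

c = Chain(Dense(2, 1, tanh))   # invented architecture; any Chain works
nn = SymbolicNeuralNetwork(c)

g = Gradient(nn)    # builds the symbolic output and differentiates it
g.f                 # the differentiated symbolic expression (formerly g.output)
derivative(g)       # the symbolic gradient with respect to params(nn)
```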

src/derivatives/jacobian.jl

Lines changed: 16 additions & 13 deletions
@@ -1,24 +1,27 @@
 @doc raw"""
     Jacobian <: Derivative
 
-An instance of [`Derivative`](@ref). Computes the derivatives of a neural network with respect to its inputs.
+A subtype of [`Derivative`](@ref). Computes the derivatives of a neural network with respect to its inputs.
 
 # Constructors
 
-    Jacobian(output, nn)
+    Jacobian(f, nn)
     Jacobian(nn)
 
 Compute the Jacobian of a [`SymbolicNeuralNetwork`](@ref) with respect to the input arguments.
 
-The output of `Jacobian` consists of a `NamedTuple` that has the following keys:
-1. a symbolic expression of the input (keyword `x`),
-2. a symbolic expression of the output (keyword `soutput`),
-3. a symbolic expression of the gradient (keyword `s∇output`).
 
-If `output` is not supplied as an input argument than it is taken to be:
+# Keys
+
+`Jacobian` has the following keys:
+1. `nn::`[`SymbolicNeuralNetwork`](@ref),
+2. `f`: a symbolic expression to be differentiated,
+3. `□`: a symbolic expression of the Jacobian.
+
+If `f` is not supplied as an input argument then it is taken to be:
 
 ```julia
-soutput = nn.model(nn.input, params(nn))
+f = nn.model(nn.input, params(nn))
 ```
 
 # Implementation
@@ -71,9 +74,9 @@ jacobian1(input, ps) ≈ [analytic_jacobian(i, j) for j ∈ 1:output_dim, i ∈ 1:input_dim]
 true
 ```
 """
-struct Jacobian{ST, OT, SDT} <: Derivative{ST, OT, SDT}
+struct Jacobian{ST, FT, SDT} <: Derivative{ST, FT, SDT}
     nn::ST
-    output::OT
+    f::FT
     □::SDT
 end
 
@@ -87,12 +90,12 @@ function Jacobian(nn::AbstractSymbolicNeuralNetwork)
     Jacobian(soutput, nn)
 end
 
-function Jacobian(soutput::EqT, nn::AbstractSymbolicNeuralNetwork)
+function Jacobian(f::EqT, nn::AbstractSymbolicNeuralNetwork)
     # make differential
     Dx = symbolic_differentials(nn.input)
 
     # Evaluation of gradient
-    s∇output = hcat([expand_derivatives.(Symbolics.scalarize(dx(soutput))) for dx in Dx]...)
+    s∇f = hcat([expand_derivatives.(Symbolics.scalarize(dx(f))) for dx in Dx]...)
 
-    Jacobian(nn, soutput, s∇output)
+    Jacobian(nn, f, s∇f)
 end
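
Analogously for `Jacobian`, a hedged sketch of the renamed fields (dimensions invented; assuming `derivative` returns the stored `□` field, as it does for `Gradient`):

```julia
using AbstractNeuralNetworks
using SymbolicNeuralNetworks: SymbolicNeuralNetwork, Jacobian, derivative

c = Chain(Dense(3, 2, tanh))   # invented dimensions
nn = SymbolicNeuralNetwork(c)

j = Jacobian(nn)
j.f              # symbolic network output that is differentiated (formerly j.output)
derivative(j)    # the symbolic Jacobian of the output with respect to nn.input
```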

test/derivatives/jacobian.jl

Lines changed: 1 addition & 1 deletion
@@ -28,7 +28,7 @@ function test_jacobian(n::Integer, T = Float32)
 
     params = NeuralNetwork(c, T).params
     input = rand(T, n)
-    @test build_nn_function(g.output, nn)(input, params) ≈ c(input, params)
+    @test build_nn_function(g.f, nn)(input, params) ≈ c(input, params)
     @test build_nn_function(derivative(g), nn)(input, params) ≈ ForwardDiff.jacobian(input -> c(input, params), input)
 end
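
The second assertion cross-checks the symbolic Jacobian against ForwardDiff. A hedged, self-contained version of that cross-check, with an invented plain function standing in for the network:

```julia
using ForwardDiff, Test

f(x) = [x[1]^2 + x[2], sin(x[2])]        # invented stand-in for c(input, params)
x0 = rand(2)
analytic = [2x0[1] 1.0; 0.0 cos(x0[2])]  # Jacobian of f computed by hand
@test ForwardDiff.jacobian(f, x0) ≈ analytic
```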