
Commit 33082d8

Merge pull request #26 from JuliaGNI/output-to-f
Changed keyword output -> f.
2 parents: ad63009 + 6895926 · commit 33082d8

5 files changed: +39 -33 lines changed
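The rename is user-visible wherever the stored symbolic expression is accessed by field name. A minimal before/after sketch, assuming a small `Chain` like the ones used in the doctests below (the concrete architecture is an assumption, not taken from this diff):

```julia
using SymbolicNeuralNetworks: SymbolicNeuralNetwork, Gradient
using AbstractNeuralNetworks

c = Chain(Dense(2, 1, tanh))   # assumed example model
nn = SymbolicNeuralNetwork(c)
g = Gradient(nn)

g.f   # the stored symbolic expression; before this commit this was `g.output`
```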

Project.toml

Lines changed: 0 additions & 1 deletion

@@ -20,7 +20,6 @@ ForwardDiff = "0.10.38"
 GeometricMachineLearning = "0.4"
 Latexify = "0.16.5"
 RuntimeGeneratedFunctions = "0.5"
-SafeTestsets = "0.1"
 SymbolicUtils = "<3.8.0"
 Symbolics = "5, 6"
 Zygote = "0.6.73"

src/derivatives/derivative.jl

Lines changed: 6 additions & 6 deletions

@@ -19,16 +19,16 @@ function symbolic_differentials(sparams::NeuralNetworkParameters)
     NeuralNetworkParameters{keys(sparams)}(vals)
 end
 
-function symbolic_derivative(soutput, Dx::AbstractArray)
-    [expand_derivatives(Symbolics.scalarize(dx(soutput))) for dx in Dx]
+function symbolic_derivative(f, Dx::AbstractArray)
+    [expand_derivatives(Symbolics.scalarize(dx(f))) for dx in Dx]
 end
 
-function symbolic_derivative(soutput, dps::NamedTuple)
-    gradient_values = (symbolic_derivative(soutput, dps[key]) for key in keys(dps))
+function symbolic_derivative(f, dps::NamedTuple)
+    gradient_values = (symbolic_derivative(f, dps[key]) for key in keys(dps))
     NamedTuple{keys(dps)}(gradient_values)
 end
 
-function symbolic_derivative(soutput, dps::NeuralNetworkParameters)
-    vals = Tuple(symbolic_derivative(soutput, dp) for dp in values(dps))
+function symbolic_derivative(f, dps::NeuralNetworkParameters)
+    vals = Tuple(symbolic_derivative(f, dp) for dp in values(dps))
     NeuralNetworkParameters{keys(dps)}(vals)
 end
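The three methods above implement one recursion: an array of differential operators is mapped over the expression, and named containers recurse key-by-key until such an array is reached. A minimal sketch of that dispatch pattern using plain Symbolics.jl objects; the helper name `apply_ds` is made up for illustration and is not part of the package:

```julia
# Hypothetical helper `apply_ds`, sketching the dispatch pattern above:
# map differential operators over an expression, recurse through NamedTuples.
using Symbolics

apply_ds(f, Dx::AbstractArray) = [expand_derivatives(dx(f)) for dx in Dx]

function apply_ds(f, dps::NamedTuple)
    NamedTuple{keys(dps)}(Tuple(apply_ds(f, dp) for dp in values(dps)))
end

@variables x y
expr = x^2 + y
ds = (L1 = [Differential(x)], L2 = [Differential(y)])
apply_ds(expr, ds)   # (L1 = [2x], L2 = [1])
```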

src/derivatives/gradient.jl

Lines changed: 15 additions & 9 deletions

@@ -5,13 +5,17 @@ Computes and stores the gradient of a symbolic function with respect to the para
 
 # Constructors
 
-    Gradient(output, nn)
+    Gradient(f, nn)
 
-Differentiate the symbolic `output` with respect to the parameters of `nn`.
+Differentiate the symbolic `f` with respect to the parameters of `nn`.
 
     Gradient(nn)
 
-Compute the symbolic output of `nn` and differentiate it with respect to the parameters of `nn`.
+Compute the symbolic output of `nn` and differentiate it with respect to the parameters of `nn`. This does:
+
+```julia
+nn.model(nn.input, params(nn))
+```
 
 # Examples
@@ -29,7 +33,7 @@ nn = SymbolicNeuralNetwork(c)
 Internally the constructors are using [`symbolic_pullback`](@ref).
 """
 struct Gradient{OT, SDT, ST} <: Derivative{OT, SDT, ST}
-    output::OT
+    f::OT
     □::SDT
     nn::ST
 end
@@ -39,6 +43,8 @@ end
 
 # Examples
 
+We compare this to [`symbolic_pullback`](@ref) here:
+
 ```jldoctest
 using SymbolicNeuralNetworks: SymbolicNeuralNetwork, Gradient, derivative, symbolic_pullback
 using AbstractNeuralNetworks
@@ -48,7 +54,7 @@ nn = SymbolicNeuralNetwork(c)
 g = Gradient(nn)
 ∇ = derivative(g)
 
-isequal(∇, symbolic_pullback(g.output, nn))
+isequal(∇, symbolic_pullback(g.f, nn))
 
 # output
 
@@ -67,9 +73,9 @@ function Gradient(nn::SymbolicNeuralNetwork)
 end
 
 @doc raw"""
-    symbolic_pullback(nn, output)
+    symbolic_pullback(f, nn)
 
-This takes a symbolic output that depends on the parameters in `nn` and returns the corresponding pullback (a symbolic expression).
+This takes a symbolic `f` that depends on the parameters in `nn` and returns the corresponding pullback (a symbolic expression).
 
 This is used by [`Gradient`](@ref) and [`SymbolicPullback`](@ref).
 
@@ -89,7 +95,7 @@ spb = symbolic_pullback(output, nn)
 spb[1].L1.b
 ```
 """
-function symbolic_pullback(soutput::EqT, nn::AbstractSymbolicNeuralNetwork)::Union{AbstractArray{<:Union{NamedTuple, NeuralNetworkParameters}}, Union{NamedTuple, NeuralNetworkParameters}}
+function symbolic_pullback(f::EqT, nn::AbstractSymbolicNeuralNetwork)::Union{AbstractArray{<:Union{NamedTuple, NeuralNetworkParameters}}, Union{NamedTuple, NeuralNetworkParameters}}
     symbolic_diffs = symbolic_differentials(params(nn))
-    [symbolic_derivative(soutput_single, symbolic_diffs) for soutput_single ∈ soutput]
+    [symbolic_derivative(f_single, symbolic_diffs) for f_single ∈ f]
 end
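For orientation, a hedged sketch of the renamed call path after this commit, modeled on the docstring example above; the concrete `Chain` is an assumption, since the doctest's architecture is elided from this diff:

```julia
using SymbolicNeuralNetworks: SymbolicNeuralNetwork, symbolic_pullback
using AbstractNeuralNetworks

c = Chain(Dense(2, 1, tanh))   # assumed small architecture
nn = SymbolicNeuralNetwork(c)

# the symbolic expression is now conventionally named `f` (formerly `output`):
f = nn.model(nn.input, params(nn))
spb = symbolic_pullback(f, nn)

spb[1].L1.b   # pullback entry for the first layer's bias, as in the docstring
```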

src/derivatives/jacobian.jl

Lines changed: 15 additions & 12 deletions

@@ -1,24 +1,27 @@
 @doc raw"""
     Jacobian <: Derivative
 
-An instance of [`Derivative`](@ref). Computes the derivatives of a neural network with respect to its inputs.
+A subtype of [`Derivative`](@ref). Computes the derivatives of a neural network with respect to its inputs.
 
 # Constructors
 
-    Jacobian(output, nn)
+    Jacobian(f, nn)
     Jacobian(nn)
 
 Compute the jacobian of a [`SymbolicNeuralNetwork`](@ref) with respect to the input arguments.
 
-The output of `Jacobian` consists of a `NamedTuple` that has the following keys:
-1. a symbolic expression of the input (keyword `x`),
-2. a symbolic expression of the output (keyword `soutput`),
-3. a symbolic expression of the gradient (keyword `s∇output`).
 
-If `output` is not supplied as an input argument than it is taken to be:
+# Keys
+
+`Jacobian` has the following keys:
+1. `nn::`[`SymbolicNeuralNetwork`](@ref),
+2. `f`: a symbolic expression to be differentiated,
+3. `□`: a symbolic expression of the Jacobian.
+
+If `f` is not supplied as an input argument then it is taken to be:
 
 ```julia
-soutput = nn.model(nn.input, params(nn))
+f = nn.model(nn.input, params(nn))
 ```
 
 # Implementation
@@ -72,21 +75,21 @@ true
 ```
 """
 struct Jacobian{OT, SDT, ST} <: Derivative{OT, SDT, ST}
-    output::OT
+    f::OT
     □::SDT
     nn::ST
 end
 
 derivative(j::Jacobian) = j.□
 
-function Jacobian(soutput::EqT, nn::AbstractSymbolicNeuralNetwork)
+function Jacobian(f::EqT, nn::AbstractSymbolicNeuralNetwork)
     # make differential
     Dx = symbolic_differentials(nn.input)
 
     # Evaluation of gradient
-    s∇output = hcat([expand_derivatives.(Symbolics.scalarize(dx(soutput))) for dx in Dx]...)
+    s∇f = hcat([expand_derivatives.(Symbolics.scalarize(dx(f))) for dx in Dx]...)
 
-    Jacobian(soutput, s∇output, nn)
+    Jacobian(f, s∇f, nn)
 end
 
 function Jacobian(nn::AbstractSymbolicNeuralNetwork)
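And a short usage sketch for `Jacobian` under the new field names, again with an assumed toy `Chain`; `derivative` returns the `□` field, i.e. the symbolic Jacobian:

```julia
using SymbolicNeuralNetworks: SymbolicNeuralNetwork, Jacobian, derivative
using AbstractNeuralNetworks

c = Chain(Dense(2, 2, tanh))   # assumed toy model
nn = SymbolicNeuralNetwork(c)
j = Jacobian(nn)               # internally sets f = nn.model(nn.input, params(nn))

j.f            # the symbolic expression that was differentiated
derivative(j)  # the symbolic Jacobian stored in `j.□`
```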

test/derivatives/jacobian.jl

Lines changed: 3 additions & 5 deletions

@@ -28,14 +28,12 @@ function test_jacobian(n::Integer, T = Float32)
 
     _params = params(NeuralNetwork(c, T))
     input = rand(T, n)
-    f = build_nn_function(g.output, nn)
-    ∇f = build_nn_function(derivative(g), nn)
-    @test f(input, params) ≈ c(input, params)
-    @test ∇f(input, params) ≈ ForwardDiff.jacobian(input -> c(input, params), input)
+    @test build_nn_function(g.f, nn)(input, _params) ≈ c(input, _params)
+    @test build_nn_function(derivative(g), nn)(input, _params) ≈ ForwardDiff.jacobian(input -> c(input, _params), input)
 end
 
 for n ∈ 10:1
     for T ∈ (Float32, Float64)
         test_jacobian(n, T)
-end
+    end
 end
