
Commit c383aa2

Merge branch 'main' into remove-reduce-code
2 parents a92ac46 + 26565f0

40 files changed: +453, -905 lines

Project.toml

Lines changed: 3 additions & 1 deletion
@@ -17,6 +17,7 @@ SymbolicUtils = "d1185830-fcd6-423d-90d6-eec64667417b"
 AbstractNeuralNetworks = "0.3, 0.4, 0.5"
 Documenter = "1.8.0"
 ForwardDiff = "0.10.38"
+GeometricMachineLearning = "0.3.7"
 Latexify = "0.16.5"
 RuntimeGeneratedFunctions = "0.5"
 SafeTestsets = "0.1"
@@ -33,6 +34,7 @@ Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c"
 SafeTestsets = "1bc83da4-3b8d-516f-aca4-4fe02f6d838f"
 Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
 Zygote = "e88e6eb3-aa80-5325-afca-941959d7151f"
+GeometricMachineLearning = "194d25b2-d3f5-49f0-af24-c124f4aa80cc"

 [targets]
-test = ["Test", "ForwardDiff", "Random", "Documenter", "Latexify", "SafeTestsets", "Zygote"]
+test = ["Test", "ForwardDiff", "Random", "Documenter", "Latexify", "SafeTestsets", "Zygote", "GeometricMachineLearning"]

docs/src/double_derivative.md

Lines changed: 2 additions & 2 deletions
@@ -18,7 +18,7 @@
 ```@example jacobian_gradient
 using AbstractNeuralNetworks
 using SymbolicNeuralNetworks
-using SymbolicNeuralNetworks: Jacobian, Gradient, derivative
+using SymbolicNeuralNetworks: Jacobian, Gradient, derivative, params
 using Latexify: latexify

 c = Chain(Dense(2, 1, tanh; use_bias = false))
@@ -92,7 +92,7 @@ x = \begin{pmatrix} 1 \\ 0 \end{pmatrix}, \quad W = \begin{bmatrix} 1 & 0 \\ 0 &
 ```

 ```@example jacobian_gradient
-built_function = build_nn_function(derivative(g), nn.params, nn.input)
+built_function = build_nn_function(derivative(g), params(nn), nn.input)

 x = [1., 0.]
 ps = NeuralNetworkParameters((L1 = (W = [1. 0.; 0. 1.], b = [0., 0.]), ))
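
For orientation, a minimal sketch of the updated accessor pattern on this doc page (not part of the diff; the construction of `nn` and `g` is assumed from the surrounding example, and the call signature of the built function is assumed to be `(input, parameters)`):

```julia
using AbstractNeuralNetworks
using SymbolicNeuralNetworks
using SymbolicNeuralNetworks: Jacobian, Gradient, derivative, params

c  = Chain(Dense(2, 1, tanh; use_bias = false))
nn = SymbolicNeuralNetwork(c)   # assumed constructor, as in the doc example
g  = Gradient(nn)               # assumed constructor, as in the doc example

# parameters are now retrieved via the `params` accessor rather than the `.params` field
# old: build_nn_function(derivative(g), nn.params, nn.input)
built_function = build_nn_function(derivative(g), params(nn), nn.input)

x  = [1., 0.]
ps = NeuralNetworkParameters((L1 = (W = [1. 0.; 0. 1.], b = [0., 0.]), ))
built_function(x, ps)           # evaluate the built gradient function at (x, ps)
```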

docs/src/hamiltonian_neural_network.md

Lines changed: 1 addition & 1 deletion
@@ -35,7 +35,7 @@ z_data = randn(T, 2, n_points)
 nothing # hide
 ```

-We now specify a pullback [`HamiltonianSymbolicNeuralNetwork`](@ref):
+We now specify a pullback `HamiltonianSymbolicNeuralNetwork`

 ```julia hnn
 _pullback = SymbolicPullback(nn)

docs/src/symbolic_neural_networks.md

Lines changed: 3 additions & 3 deletions
@@ -6,7 +6,7 @@ We first call the symbolic neural network that only consists of one layer:

 ```@example snn
 using SymbolicNeuralNetworks
-using AbstractNeuralNetworks: Chain, Dense
+using AbstractNeuralNetworks: Chain, Dense, params

 input_dim = 2
 output_dim = 1
@@ -23,7 +23,7 @@ using Symbolics
 using Latexify: latexify

 @variables sinput[1:input_dim]
-soutput = nn.model(sinput, nn.params)
+soutput = nn.model(sinput, params(nn))

 soutput
 ```
@@ -101,7 +101,7 @@ We now compare the neural network-approximated curve to the original one:
 fig = Figure()
 ax = Axis3(fig[1, 1])

-surface!(x_vec, y_vec, [c([x, y], nn_cpu.params)[1] for x in x_vec, y in y_vec]; alpha = .8, colormap = :darkterrain, transparency = true)
+surface!(x_vec, y_vec, [c([x, y], params(nn_cpu))[1] for x in x_vec, y in y_vec]; alpha = .8, colormap = :darkterrain, transparency = true)
 fig
 ```
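
The updated call pattern on this page, collected in one place as a hedged sketch (the layer activation and the `SymbolicNeuralNetwork` construction are assumed from the surrounding doc page, not part of the diff):

```julia
using SymbolicNeuralNetworks
using AbstractNeuralNetworks: Chain, Dense, params
using Symbolics

input_dim  = 2
output_dim = 1
c  = Chain(Dense(input_dim, output_dim, tanh))  # assumed layer setup
nn = SymbolicNeuralNetwork(c)                   # assumed constructor, as on the doc page

@variables sinput[1:input_dim]
# old: nn.model(sinput, nn.params)
soutput = nn.model(sinput, params(nn))          # symbolic forward pass with the new accessor
```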

scripts/pullback_comparison.jl

Lines changed: 6 additions & 6 deletions
@@ -19,16 +19,16 @@ output = rand(1, batch_size)
 # output sensitivities
 _do = 1.

-# spb(nn_cpu.params, nn.model, (input, output))[2](_do)
-# zpb(nn_cpu.params, nn.model, (input, output))[2](_do)
-# @time spb_evaluated = spb(nn_cpu.params, nn.model, (input, output))[2](_do)
-# @time zpb_evaluated = zpb(nn_cpu.params, nn.model, (input, output))[2](_do)[1].params
+# spb(params(nn_cpu), nn.model, (input, output))[2](_do)
+# zpb(params(nn_cpu), nn.model, (input, output))[2](_do)
+# @time spb_evaluated = spb(params(nn_cpu), nn.model, (input, output))[2](_do)
+# @time zpb_evaluated = zpb(params(nn_cpu), nn.model, (input, output))[2](_do)[1].params
 # @assert values(spb_evaluated) .≈ values(zpb_evaluated)

 function timenn(pb, params, model, input, output, _do = 1.)
     pb(params, model, (input, output))[2](_do)
     @time pb(params, model, (input, output))[2](_do)
 end

-timenn(spb, nn_cpu.params, nn.model, input, output)
-timenn(zpb, nn_cpu.params, nn.model, input, output)
+timenn(spb, params(nn_cpu), nn.model, input, output)
+timenn(zpb, params(nn_cpu), nn.model, input, output)
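
For context, a hedged sketch of how the symbolic pullback in this comparison is typically set up and called. The architecture, batch size, and model argument are assumptions (the script's own setup and its Zygote-based pullback `zpb` are not shown in this diff); only the call pattern `pb(ps, model, (input, output))[2](_do)` is taken from the lines above:

```julia
using SymbolicNeuralNetworks: SymbolicNeuralNetwork, SymbolicPullback
using AbstractNeuralNetworks: Chain, Dense, NeuralNetwork, params

c      = Chain(Dense(2, 10, tanh), Dense(10, 1, tanh))  # assumed architecture
nn_cpu = NeuralNetwork(c)                               # numeric network with concrete parameters
snn    = SymbolicNeuralNetwork(nn_cpu)                  # symbolic counterpart
spb    = SymbolicPullback(snn)                          # constructor call as in the docs (assumed)

batch_size = 100                                        # assumed
input  = rand(2, batch_size)
output = rand(1, batch_size)
_do    = 1.                                             # output sensitivity

# the second return value is a closure that is evaluated at the output sensitivity;
# the script itself passes `nn.model` as the model argument, here we pass the chain `c`
grad = spb(params(nn_cpu), c, (input, output))[2](_do)
```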

src/SymbolicNeuralNetworks.jl

Lines changed: 9 additions & 25 deletions
@@ -15,42 +15,26 @@ module SymbolicNeuralNetworks

 RuntimeGeneratedFunctions.init(@__MODULE__)

-include("equation_types.jl")
+include("custom_definitions_and_extensions/equation_types.jl")

-export symbolize
-include("utils/symbolize.jl")
+include("symbolic_neuralnet/symbolize.jl")

 export AbstractSymbolicNeuralNetwork
-export SymbolicNeuralNetwork, SymbolicModel
-export HamiltonianSymbolicNeuralNetwork, HNNLoss
-export architecture, model, params, equations, functions
+export SymbolicNeuralNetwork

-# make symbolic parameters (`NeuralNetworkParameters`)
-export symbolicparameters
-include("layers/abstract.jl")
-include("layers/dense.jl")
-include("layers/linear.jl")
-include("chain.jl")
-
-export evaluate_equations
-include("symbolic_neuralnet.jl")
-
-export symbolic_hamiltonian
-include("hamiltonian.jl")
+include("symbolic_neuralnet/symbolic_neuralnet.jl")

 export build_nn_function
-include("utils/build_function.jl")
-include("utils/build_function2.jl")
-include("utils/build_function_arrays.jl")
+include("build_function/build_function.jl")
+include("build_function/build_function_double_input.jl")
+include("build_function/build_function_arrays.jl")

 export SymbolicPullback
-include("pullback.jl")
+include("derivatives/pullback.jl")

 include("derivatives/derivative.jl")
 include("derivatives/jacobian.jl")
 include("derivatives/gradient.jl")

-include("custom_equation.jl")
-
-include("utils/latexraw.jl")
+include("custom_definitions_and_extensions/latexraw.jl")
 end
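
After this reorganisation the module exposes a much smaller surface (`AbstractSymbolicNeuralNetwork`, `SymbolicNeuralNetwork`, `build_nn_function`, `SymbolicPullback`). A hedged sketch of downstream usage against the remaining exports; the call pattern is assumed from the doctests changed elsewhere in this commit:

```julia
using SymbolicNeuralNetworks             # exports SymbolicNeuralNetwork, build_nn_function, SymbolicPullback
using SymbolicNeuralNetworks: params     # `params` is no longer exported here, but still importable
using AbstractNeuralNetworks: Chain, Dense, NeuralNetwork

c   = Chain(Dense(2, 1, tanh))
nn  = NeuralNetwork(c)                   # numeric network
snn = SymbolicNeuralNetwork(nn)          # symbolic counterpart

# build an executable function from the symbolic network output and evaluate it
built = build_nn_function(c(snn.input, params(snn)), params(snn), snn.input)
built([1., 2.], params(nn))
```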

src/utils/build_function.jl renamed to src/build_function/build_function.jl

Lines changed: 14 additions & 13 deletions
@@ -19,7 +19,7 @@ The functions mentioned in the implementation section were adjusted ad-hoc to de
 Other problems may occur. In case you bump into one please [open an issue on github](https://github.com/JuliaGNI/SymbolicNeuralNetworks.jl/issues).
 """
 function build_nn_function(eq::EqT, nn::AbstractSymbolicNeuralNetwork)
-    build_nn_function(eq, nn.params, nn.input)
+    build_nn_function(eq, params(nn), nn.input)
 end

 function build_nn_function(eq::EqT, sparams::NeuralNetworkParameters, sinput::Symbolics.Arr)
@@ -39,25 +39,26 @@ Build a function that can process a matrix. This is used as a starting point for
 # Examples

 ```jldoctest
-using SymbolicNeuralNetworks: _build_nn_function, symbolicparameters
-using Symbolics
-using AbstractNeuralNetworks
+using SymbolicNeuralNetworks: _build_nn_function, SymbolicNeuralNetwork
+using AbstractNeuralNetworks: params, Chain, Dense, NeuralNetwork
+import Random
+Random.seed!(123)

 c = Chain(Dense(2, 1, tanh))
-params = symbolicparameters(c)
-@variables sinput[1:2]
-eq = c(sinput, params)
-built_function = _build_nn_function(eq, params, sinput)
-ps = NeuralNetwork(c).params
-input = rand(2, 2)
-
-(built_function(input, ps, 1), built_function(input, ps, 2)) .≈ (c(input[:, 1], ps), c(input[:, 2], ps))
+nn = NeuralNetwork(c)
+snn = SymbolicNeuralNetwork(nn)
+eq = c(snn.input, params(snn))
+built_function = _build_nn_function(eq, params(snn), snn.input)
+built_function([1. 2.; 3. 4.], params(nn), 1)

 # output

-(true, true)
+1-element Vector{Float64}:
+ -0.9999967113439513
 ```

+Note that we have to supply an extra argument (index) to `_build_nn_function` that we do not have to supply to [`build_nn_function`](@ref).
+
 # Implementation

 This first calls `Symbolics.build_function` with the keyword argument `expression = Val{true}` and then modifies the generated code by calling:
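
To make the note about the extra index argument concrete, a hedged sketch contrasting the two variants (call signatures taken from the doctests in this file; treating the non-underscore variant's handling of a matrix input as a batch is an assumption):

```julia
using SymbolicNeuralNetworks: _build_nn_function, build_nn_function, SymbolicNeuralNetwork
using AbstractNeuralNetworks: params, Chain, Dense, NeuralNetwork
import Random
Random.seed!(123)

c   = Chain(Dense(2, 1, tanh))
nn  = NeuralNetwork(c)
snn = SymbolicNeuralNetwork(nn)
eq  = c(snn.input, params(snn))

input = [1. 2.; 3. 4.]

# low-level variant: evaluates one column of `input`, selected by the index argument
f_low = _build_nn_function(eq, params(snn), snn.input)
f_low(input, params(nn), 1)   # column 1
f_low(input, params(nn), 2)   # column 2

# high-level variant: no index argument (assumed to process the whole input)
f_high = build_nn_function(eq, params(snn), snn.input)
f_high(input, params(nn))
```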

src/utils/build_function_arrays.jl renamed to src/build_function/build_function_arrays.jl

Lines changed: 23 additions & 29 deletions
@@ -1,32 +1,29 @@
 """
     build_nn_function(eqs::AbstractArray{<:NeuralNetworkParameters}, sparams, sinput...)

-Build an executable function based on `eqs` that potentially also has a symbolic output.
+Build an executable function based on an array of symbolic equations `eqs`.

 # Examples

 ```jldoctest
 using SymbolicNeuralNetworks: build_nn_function, SymbolicNeuralNetwork
-using AbstractNeuralNetworks: Chain, Dense, NeuralNetwork
+using AbstractNeuralNetworks: Chain, Dense, NeuralNetwork, params
 import Random
 Random.seed!(123)

 ch = Chain(Dense(2, 1, tanh))
-nn = SymbolicNeuralNetwork(ch)
-eqs = [(a = ch(nn.input, nn.params), b = ch(nn.input, nn.params).^2), (c = ch(nn.input, nn.params).^3, )]
-funcs = build_nn_function(eqs, nn.params, nn.input)
+nn = NeuralNetwork(ch)
+snn = SymbolicNeuralNetwork(nn)
+eqs = [(a = ch(snn.input, params(snn)), b = ch(snn.input, params(snn)).^2), (c = ch(snn.input, params(snn)).^3, )]
+funcs = build_nn_function(eqs, params(snn), snn.input)
 input = [1., 2.]
-ps = NeuralNetwork(ch).params
-a = ch(input, ps)
-b = ch(input, ps).^2
-c = ch(input, ps).^3
-funcs_evaluated = funcs(input, ps)
-
-(funcs_evaluated[1].a, funcs_evaluated[1].b, funcs_evaluated[2].c) .≈ (a, b, c)
+funcs_evaluated = funcs(input, params(nn))

 # output

-(true, true, true)
+2-element Vector{NamedTuple}:
+ (a = [-0.9999386280616135], b = [0.9998772598897417])
+ (c = [-0.9998158954841537],)
 ```
 """
 function build_nn_function(eqs::AbstractArray{<:Union{NamedTuple, NeuralNetworkParameters}}, sparams::NeuralNetworkParameters, sinput::Symbolics.Arr...)
@@ -47,25 +44,21 @@ Return a function that takes an input, (optionally) an output and neural network

 ```jldoctest
 using SymbolicNeuralNetworks: build_nn_function, SymbolicNeuralNetwork
-using AbstractNeuralNetworks: Chain, Dense, NeuralNetwork
+using AbstractNeuralNetworks: Chain, Dense, NeuralNetwork, params
 import Random
 Random.seed!(123)

 c = Chain(Dense(2, 1, tanh))
-nn = SymbolicNeuralNetwork(c)
-eqs = (a = c(nn.input, nn.params), b = c(nn.input, nn.params).^2)
-funcs = build_nn_function(eqs, nn.params, nn.input)
+nn = NeuralNetwork(c)
+snn = SymbolicNeuralNetwork(nn)
+eqs = (a = c(snn.input, params(snn)), b = c(snn.input, params(snn)).^2)
+funcs = build_nn_function(eqs, params(snn), snn.input)
 input = [1., 2.]
-ps = NeuralNetwork(c).params
-a = c(input, ps)
-b = c(input, ps).^2
-funcs_evaluated = funcs(input, ps)
-
-(funcs_evaluated.a, funcs_evaluated.b) .≈ (a, b)
+funcs_evaluated = funcs(input, params(nn))

 # output

-(true, true)
+(a = [-0.9999386280616135], b = [0.9998772598897417])
 ```

 # Implementation
@@ -90,16 +83,17 @@ Return an executable function for each entry in `eqs`. This still has to be proc

 ```jldoctest
 using SymbolicNeuralNetworks: function_valued_parameters, SymbolicNeuralNetwork
-using AbstractNeuralNetworks: Chain, Dense, NeuralNetwork
+using AbstractNeuralNetworks: Chain, Dense, NeuralNetwork, params
 import Random
 Random.seed!(123)

 c = Chain(Dense(2, 1, tanh))
-nn = SymbolicNeuralNetwork(c)
-eqs = (a = c(nn.input, nn.params), b = c(nn.input, nn.params).^2)
-funcs = function_valued_parameters(eqs, nn.params, nn.input)
+nn = NeuralNetwork(c)
+snn = SymbolicNeuralNetwork(nn)
+eqs = (a = c(snn.input, params(snn)), b = c(snn.input, params(snn)).^2)
+funcs = function_valued_parameters(eqs, params(snn), snn.input)
 input = [1., 2.]
-ps = NeuralNetwork(c).params
+ps = params(nn)
 a = c(input, ps)
 b = c(input, ps).^2

src/utils/build_function2.jl renamed to src/build_function/build_function_double_input.jl

Lines changed: 1 addition & 1 deletion
@@ -13,7 +13,7 @@ Also compare this to [`build_nn_function(::EqT, ::AbstractSymbolicNeuralNetwork)
 See the *extended help section* of [`build_nn_function(::EqT, ::AbstractSymbolicNeuralNetwork)`](@ref).
 """
 function build_nn_function(eqs, nn::AbstractSymbolicNeuralNetwork, soutput)
-    build_nn_function(eqs, nn.params, nn.input, soutput)
+    build_nn_function(eqs, params(nn), nn.input, soutput)
 end

 function build_nn_function(eq::EqT, sparams::NeuralNetworkParameters, sinput::Symbolics.Arr, soutput::Symbolics.Arr)

src/chain.jl

Lines changed: 0 additions & 5 deletions
This file was deleted.

0 commit comments
