Commit f42db88: Merge branch 'main' into remove-reduce-code

Author: Benedikt Brantner
2 parents: c383aa2 + 15a8c6e

12 files changed: +33, -22 lines

Project.toml

Lines changed: 7 additions & 2 deletions

@@ -3,6 +3,9 @@ uuid = "aed23131-dcd0-47ca-8090-d21e605652e3"
 authors = ["Michael Kraus"]
 version = "0.2.0"
 
+[weakdeps]
+SymbolicUtils = "d1185830-fcd6-423d-90d6-eec64667417b"
+
 [deps]
 AbstractNeuralNetworks = "60874f82-5ada-4c70-bd1c-fa6be7711c8a"
 Latexify = "23fbe1c1-3f47-55db-b15f-69d7ec21a316"
@@ -14,14 +17,16 @@ Symbolics = "0c5d862f-8b57-4792-8d23-62f2024744c7"
 SymbolicUtils = "d1185830-fcd6-423d-90d6-eec64667417b"
 
 [compat]
-AbstractNeuralNetworks = "0.3, 0.4, 0.5"
+AbstractNeuralNetworks = "0.3, 0.4, 0.5, 0.6"
 Documenter = "1.8.0"
 ForwardDiff = "0.10.38"
-GeometricMachineLearning = "0.3.7"
+GeometricMachineLearning = "0.4"
 Latexify = "0.16.5"
 RuntimeGeneratedFunctions = "0.5"
 SafeTestsets = "0.1"
 SymbolicUtils = "<3.8.0"
+SafeTestsets = "0.1"
+SymbolicUtils = "<3.8.0"
 Symbolics = "5, 6"
 Zygote = "0.6.73"
 julia = "1.10"
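
For context: a package listed under `[weakdeps]` is not an unconditional dependency. Julia's package manager only loads it when the user installs and loads it themselves, at which point it can trigger a package extension (declared in an `[extensions]` table, which is not visible in this diff). A minimal sketch of what such an extension module could look like; the module name and file path below are hypothetical, not taken from this repository:

```julia
# Hypothetical file ext/SymbolicUtilsExt.jl; loaded automatically only when
# both SymbolicNeuralNetworks and SymbolicUtils are present in the session.
module SymbolicUtilsExt

using SymbolicNeuralNetworks
using SymbolicUtils  # the weak dependency declared under [weakdeps]

# Methods that genuinely need SymbolicUtils would be defined here.

end
```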

docs/src/double_derivative.md

Lines changed: 2 additions & 2 deletions

@@ -18,7 +18,7 @@
 ```@example jacobian_gradient
 using AbstractNeuralNetworks
 using SymbolicNeuralNetworks
-using SymbolicNeuralNetworks: Jacobian, Gradient, derivative, params
+using SymbolicNeuralNetworks: Jacobian, Gradient, derivative
 using Latexify: latexify
 
 c = Chain(Dense(2, 1, tanh; use_bias = false))
@@ -59,7 +59,7 @@ We can easily differentiate a neural network twice by using [`SymbolicNeuralNetw
 ```@example jacobian_gradient
 using AbstractNeuralNetworks
 using SymbolicNeuralNetworks
-using SymbolicNeuralNetworks: Jacobian, Gradient, derivative
+using SymbolicNeuralNetworks: Jacobian, Gradient, derivative, params
 using Latexify: latexify
 
 c = Chain(Dense(2, 1, tanh))

docs/src/hamiltonian_neural_network.md

Lines changed: 3 additions & 2 deletions

@@ -4,6 +4,7 @@ Here we build a Hamiltonian neural network as a symbolic neural network.
 
 ```julia hnn
 using SymbolicNeuralNetworks
+using SymbolicNeuralNetworks: params
 using GeometricMachineLearning
 using AbstractNeuralNetworks: Dense, UnknownArchitecture, Model
 using LinearAlgebra: norm
@@ -45,7 +46,7 @@ nothing # hide
 We can now train the network:
 
 ```julia hnn
-ps = NeuralNetwork(c, T).params
+ps = params(NeuralNetwork(c, T))
 dl = DataLoader(z_data, hvf_analytic(z_data))
 o = Optimizer(AdamOptimizer(.01), ps)
 batch = Batch(200)
@@ -59,7 +60,7 @@ We now integrate the vector field:
 
 ```julia hnn
 using GeometricIntegrators
-hvf_closure(input) = build_nn_function(hvf, x, nn)(input, nn_dummy.params)
+hvf_closure(input) = build_nn_function(hvf, x, nn)(input, params(nn_dummy))
 function v(v, t, q, params)
     v .= hvf_closure(q)
 end
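
A pattern that runs through this commit (here and in the doctest changes below): direct field access such as `nn_dummy.params` is replaced by the `params(...)` accessor. A rough illustration of the accessor pattern with a hypothetical stand-in type, not the actual AbstractNeuralNetworks internals:

```julia
# Hypothetical stand-in type illustrating why an accessor is preferred.
struct Net
    params::NamedTuple
end

params(nn::Net) = nn.params  # one level of indirection over the raw field

nn = Net((W = [1.0 2.0; 3.0 4.0],))
ps = params(nn)   # preferred: survives a later rename of the field
ps === nn.params  # true today, but only the accessor is the stable API
```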

scripts/pullback_comparison.jl

Lines changed: 4 additions & 4 deletions

@@ -1,7 +1,7 @@
 using SymbolicNeuralNetworks
 using AbstractNeuralNetworks
 using GeometricMachineLearning
-using AbstractNeuralNetworks: FeedForwardLoss
+using AbstractNeuralNetworks: FeedForwardLoss, params
 using GeometricMachineLearning: ZygotePullback
 import Random
 Random.seed!(123)
@@ -25,9 +25,9 @@ _do = 1.
 # @time zpb_evaluated = zpb(params(nn_cpu), nn.model, (input, output))[2](_do)[1].params
 # @assert values(spb_evaluated) .≈ values(zpb_evaluated)
 
-function timenn(pb, params, model, input, output, _do = 1.)
-    pb(params, model, (input, output))[2](_do)
-    @time pb(params, model, (input, output))[2](_do)
+function timenn(pb, _params, model, input, output, _do = 1.)
+    pb(_params, model, (input, output))[2](_do)
+    @time pb(_params, model, (input, output))[2](_do)
 end
 
 timenn(spb, params(nn_cpu), nn.model, input, output)
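
The rename from `params` to `_params` inside `timenn` is presumably there to stop the argument from shadowing the `params` function that the same commit imports at the top of the script. A minimal sketch of that shadowing problem, with made-up names:

```julia
# Stand-in for the accessor imported via `using AbstractNeuralNetworks: params`.
params(nn) = "network parameters"

function shadowed(params)
    # In this body `params` is the argument, so the imported function is unreachable.
    params isa Function || return "cannot call params(...) here"
    params("x")
end

function unshadowed(_params)
    params(_params)  # the imported accessor is still visible
end

shadowed((a = 1, b = 2))    # "cannot call params(...) here"
unshadowed((a = 1, b = 2))  # "network parameters"
```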

src/build_function/build_function.jl

Lines changed: 1 addition & 1 deletion

@@ -54,7 +54,7 @@ built_function([1. 2.; 3. 4.], params(nn), 1)
 # output
 
 1-element Vector{Float64}:
- -0.9999967113439513
+ 0.9912108161055604
 ```
 
 Note that we have to supply an extra argument (index) to `_build_nn_function` that we do not have to supply to [`build_nn_function`](@ref).

src/build_function/build_function_arrays.jl

Lines changed: 3 additions & 3 deletions

@@ -22,8 +22,8 @@ funcs_evaluated = funcs(input, params(nn))
 # output
 
 2-element Vector{NamedTuple}:
- (a = [-0.9999386280616135], b = [0.9998772598897417])
- (c = [-0.9998158954841537],)
+ (a = [0.985678060655224], b = [0.9715612392570434])
+ (c = [0.9576465981186686],)
 ```
 """
 function build_nn_function(eqs::AbstractArray{<:Union{NamedTuple, NeuralNetworkParameters}}, sparams::NeuralNetworkParameters, sinput::Symbolics.Arr...)
@@ -58,7 +58,7 @@ funcs_evaluated = funcs(input, params(nn))
 
 # output
 
-(a = [-0.9999386280616135], b = [0.9998772598897417])
+(a = [0.985678060655224], b = [0.9715612392570434])
 ```
 
 # Implementation

Lines changed: 4 additions & 0 deletions

@@ -1,3 +1,7 @@
 function _latexraw(args::AbstractNeuralNetworks.GenericActivation; kwargs...)
     _latexraw(args.σ; kwargs...)
+end
+
+function _latexraw(args::AbstractNeuralNetworks.TanhActivation; kwargs...)
+    _latexraw(tanh; kwargs...)
 end
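
The added method mirrors the existing one: `GenericActivation` delegates to the function it wraps, while `TanhActivation` carries no field and is forwarded to `tanh` directly. A small sketch of this dispatch-forwarding pattern, using hypothetical stand-in types rather than the actual Latexify/AbstractNeuralNetworks internals:

```julia
# Hypothetical stand-ins for the two activation types.
struct GenericActivation
    σ::Function
end
struct TanhActivation end

# One base renderer plus two forwarding methods, mirroring `_latexraw` above.
render(f::Function) = string(f)
render(a::GenericActivation) = render(a.σ)  # delegate to the wrapped function
render(::TanhActivation) = render(tanh)     # no field to delegate to; forward `tanh` itself

render(GenericActivation(sin))  # "sin"
render(TanhActivation())        # "tanh"
```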

src/derivatives/gradient.jl

Lines changed: 0 additions & 2 deletions

@@ -18,7 +18,6 @@ Compute the symbolic output of `nn` and differentiate it with respect to the par
 ```julia
 using SymbolicNeuralNetworks: SymbolicNeuralNetwork, Gradient, derivative
 using AbstractNeuralNetworks
-using Latexify: latexify
 
 c = Chain(Dense(2, 1, tanh))
 nn = SymbolicNeuralNetwork(c)
@@ -81,7 +80,6 @@ using SymbolicNeuralNetworks: SymbolicNeuralNetwork, symbolic_pullback
 using AbstractNeuralNetworks
 using AbstractNeuralNetworks: params
 using LinearAlgebra: norm
-using Latexify: latexify
 
 c = Chain(Dense(2, 1, tanh))
 nn = SymbolicNeuralNetwork(c)

src/derivatives/jacobian.jl

Lines changed: 2 additions & 2 deletions

@@ -45,7 +45,7 @@ We can use `Jacobian` together with [`build_nn_function`](@ref):
 ```jldoctest
 using SymbolicNeuralNetworks
 using SymbolicNeuralNetworks: Jacobian, derivative
-using AbstractNeuralNetworks: Dense, Chain, NeuralNetwork
+using AbstractNeuralNetworks: Dense, Chain, NeuralNetwork, params
 using Symbolics
 import Random
 
@@ -59,7 +59,7 @@ nn = SymbolicNeuralNetwork(c)
 □ = SymbolicNeuralNetworks.Jacobian(nn)
 # here we need to access the derivative and convert it into a function
 jacobian1 = build_nn_function(derivative(□), nn)
-ps = NeuralNetwork(c, Float64).params
+ps = params(NeuralNetwork(c, Float64))
 input = rand(input_dim)
 #derivative
 Dtanh(x::Real) = 4 * exp(2 * x) / (1 + exp(2x)) ^ 2

src/derivatives/pullback.jl

Lines changed: 4 additions & 1 deletion

@@ -108,8 +108,11 @@ SymbolicPullback(nn::SymbolicNeuralNetwork) = SymbolicPullback(nn, AbstractNeura
 Return the `NamedTuple` that's equivalent to the `NeuralNetworkParameters`.
 """
 _get_params(nt::NamedTuple) = nt
+function _get_params(nt::NamedTuple{(:params,), Tuple{AT}}) where {AT <: Any}
+    @warn "This function was most likely called because @adjoint for `NeuralNetworkParameters` hasn't been implemented."
+    nt.params
+end
 _get_params(ps::NeuralNetworkParameters) = params(ps)
-_get_params(ps::NamedTuple{(:params,), Tuple{NT}}) where {NT<:NamedTuple} = ps.params
 _get_params(ps::AbstractArray{<:Union{NamedTuple, NeuralNetworkParameters}}) = [_get_params(nt) for nt in ps]
 
 """
