Commit 15a8c6e

Merge pull request #24 from JuliaGNI/compathelper/new_version/2025-02-07-00-42-34-326-01218599105
CompatHelper: bump compat for AbstractNeuralNetworks to 0.6, (keep existing compat)
2 parents: 26565f0 + 233b917

File tree: 12 files changed (+38 / -49 lines)

Project.toml

Lines changed: 7 additions & 3 deletions

@@ -3,6 +3,9 @@ uuid = "aed23131-dcd0-47ca-8090-d21e605652e3"
 authors = ["Michael Kraus"]
 version = "0.2.0"
 
+[weakdeps]
+SymbolicUtils = "d1185830-fcd6-423d-90d6-eec64667417b"
+
 [deps]
 AbstractNeuralNetworks = "60874f82-5ada-4c70-bd1c-fa6be7711c8a"
 Latexify = "23fbe1c1-3f47-55db-b15f-69d7ec21a316"
@@ -11,15 +14,16 @@ RuntimeGeneratedFunctions = "7e49a35a-f44a-4d26-94aa-eba1b4ca6b47"
 Symbolics = "0c5d862f-8b57-4792-8d23-62f2024744c7"
 
 [compat]
-AbstractNeuralNetworks = "0.3, 0.4, 0.5"
+AbstractNeuralNetworks = "0.3, 0.4, 0.5, 0.6"
 Documenter = "1.8.0"
 ForwardDiff = "0.10.38"
-GeometricMachineLearning = "0.3.7"
+GeometricMachineLearning = "0.4"
 Latexify = "0.16.5"
 RuntimeGeneratedFunctions = "0.5"
+SafeTestsets = "0.1"
+SymbolicUtils = "<3.8.0"
 Symbolics = "5, 6"
 Zygote = "0.6.73"
-julia = "1.6"
 
 [extras]
 Documenter = "e30172f5-a6a5-5a46-863b-614d45cd2de4"
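The new `[weakdeps]` entry together with the `SymbolicUtils = "<3.8.0"` compat bound appears intended to constrain SymbolicUtils (an indirect dependency via Symbolics) without making it a hard dependency. A hypothetical sanity check, not part of this commit, could verify the resolved version:

# Hypothetical check (not part of this commit): confirm the resolved
# SymbolicUtils version respects the new `< 3.8.0` compat bound.
using Pkg

function symbolicutils_version()
    for (_, pkg) in Pkg.dependencies()
        pkg.name == "SymbolicUtils" && return pkg.version
    end
    return nothing
end

v = symbolicutils_version()
v === nothing || v < v"3.8.0" || @warn "SymbolicUtils $v violates the compat bound"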

docs/src/double_derivative.md

Lines changed: 2 additions & 2 deletions

@@ -18,7 +18,7 @@
 ```@example jacobian_gradient
 using AbstractNeuralNetworks
 using SymbolicNeuralNetworks
-using SymbolicNeuralNetworks: Jacobian, Gradient, derivative, params
+using SymbolicNeuralNetworks: Jacobian, Gradient, derivative
 using Latexify: latexify
 
 c = Chain(Dense(2, 1, tanh; use_bias = false))
@@ -59,7 +59,7 @@ We can easily differentiate a neural network twice by using [`SymbolicNeuralNetw
 ```@example jacobian_gradient
 using AbstractNeuralNetworks
 using SymbolicNeuralNetworks
-using SymbolicNeuralNetworks: Jacobian, Gradient, derivative
+using SymbolicNeuralNetworks: Jacobian, Gradient, derivative, params
 using Latexify: latexify
 
 c = Chain(Dense(2, 1, tanh))

docs/src/hamiltonian_neural_network.md

Lines changed: 3 additions & 2 deletions

@@ -4,6 +4,7 @@ Here we build a Hamiltonian neural network as a symbolic neural network.
 
 ```julia hnn
 using SymbolicNeuralNetworks
+using SymbolicNeuralNetworks: params
 using GeometricMachineLearning
 using AbstractNeuralNetworks: Dense, UnknownArchitecture, Model
 using LinearAlgebra: norm
@@ -45,7 +46,7 @@ nothing # hide
 We can now train the network:
 
 ```julia hnn
-ps = NeuralNetwork(c, T).params
+ps = params(NeuralNetwork(c, T))
 dl = DataLoader(z_data, hvf_analytic(z_data))
 o = Optimizer(AdamOptimizer(.01), ps)
 batch = Batch(200)
@@ -59,7 +60,7 @@ We now integrate the vector field:
 
 ```julia hnn
 using GeometricIntegrators
-hvf_closure(input) = build_nn_function(hvf, x, nn)(input, nn_dummy.params)
+hvf_closure(input) = build_nn_function(hvf, x, nn)(input, params(nn_dummy))
 function v(v, t, q, params)
     v .= hvf_closure(q)
 end
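The change from `NeuralNetwork(c, T).params` to `params(NeuralNetwork(c, T))` swaps direct field access for the `params` accessor, the same change made throughout this commit. A minimal sketch of the pattern, mirroring the doctest setup elsewhere in this commit (`Float64` stands in for the element type `T` used on this docs page):

# Minimal sketch (not from the docs page) of the accessor pattern this
# commit switches to; `Float64` stands in for the element type `T`.
using AbstractNeuralNetworks: Dense, Chain, NeuralNetwork, params

c  = Chain(Dense(2, 1, tanh))
nn = NeuralNetwork(c, Float64)

ps = params(nn)    # accessor function, the style this commit moves to
# ps = nn.params   # direct field access, the style being removed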

scripts/pullback_comparison.jl

Lines changed: 4 additions & 4 deletions

@@ -1,7 +1,7 @@
 using SymbolicNeuralNetworks
 using AbstractNeuralNetworks
 using GeometricMachineLearning
-using AbstractNeuralNetworks: FeedForwardLoss
+using AbstractNeuralNetworks: FeedForwardLoss, params
 using GeometricMachineLearning: ZygotePullback
 import Random
 Random.seed!(123)
@@ -25,9 +25,9 @@ _do = 1.
 # @time zpb_evaluated = zpb(params(nn_cpu), nn.model, (input, output))[2](_do)[1].params
 # @assert values(spb_evaluated) .≈ values(zpb_evaluated)
 
-function timenn(pb, params, model, input, output, _do = 1.)
-    pb(params, model, (input, output))[2](_do)
-    @time pb(params, model, (input, output))[2](_do)
+function timenn(pb, _params, model, input, output, _do = 1.)
+    pb(_params, model, (input, output))[2](_do)
+    @time pb(_params, model, (input, output))[2](_do)
 end
 
 timenn(spb, params(nn_cpu), nn.model, input, output)
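Renaming the argument from `params` to `_params` avoids shadowing the newly imported `params` function inside `timenn`, while call sites such as `timenn(spb, params(nn_cpu), ...)` keep using the function. A hypothetical illustration of the shadowing issue, not taken from the script:

# Hypothetical illustration (not from the script) of why the argument was renamed.
using AbstractNeuralNetworks: params

f(params, nn)  = params(nn)   # `params` here is the positional argument, not the imported function
g(_params, nn) = params(nn)   # the imported `params` function is still reachable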

src/build_function/build_function.jl

Lines changed: 1 addition & 1 deletion

@@ -54,7 +54,7 @@ built_function([1. 2.; 3. 4.], params(nn), 1)
 # output
 
 1-element Vector{Float64}:
- -0.9999967113439513
+ 0.9912108161055604
 ```
 
 Note that we have to supply an extra argument (index) to `_build_nn_function` that we do not have to supply to [`build_nn_function`](@ref).
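A hedged illustration of the two calling conventions the note refers to, assuming the surrounding doctest setup: `nn` is the symbolic neural network, `built_function` came from `_build_nn_function`, and `f` (a name assumed here) came from `build_nn_function`:

# Assumes the doctest setup above; `f` is a hypothetical name for the
# closure returned by `build_nn_function`.
using AbstractNeuralNetworks: params

input = [1. 2.; 3. 4.]

f(input, params(nn))                   # build_nn_function: called with (input, params)
built_function(input, params(nn), 1)   # _build_nn_function: extra index argument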

src/build_function/build_function_arrays.jl

Lines changed: 3 additions & 3 deletions

@@ -22,8 +22,8 @@ funcs_evaluated = funcs(input, params(nn))
 # output
 
 2-element Vector{NamedTuple}:
- (a = [-0.9999386280616135], b = [0.9998772598897417])
- (c = [-0.9998158954841537],)
+ (a = [0.985678060655224], b = [0.9715612392570434])
+ (c = [0.9576465981186686],)
 ```
 """
 function build_nn_function(eqs::AbstractArray{<:Union{NamedTuple, NeuralNetworkParameters}}, sparams::NeuralNetworkParameters, sinput::Symbolics.Arr...)
@@ -58,7 +58,7 @@ funcs_evaluated = funcs(input, params(nn))
 
 # output
 
-(a = [-0.9999386280616135], b = [0.9998772598897417])
+(a = [0.985678060655224], b = [0.9715612392570434])
 ```
 
 # Implementation
Lines changed: 4 additions & 0 deletions

@@ -1,3 +1,7 @@
 function _latexraw(args::AbstractNeuralNetworks.GenericActivation; kwargs...)
     _latexraw(args.σ; kwargs...)
+end
+
+function _latexraw(args::AbstractNeuralNetworks.TanhActivation; kwargs...)
+    _latexraw(tanh; kwargs...)
 end

src/derivatives/gradient.jl

Lines changed: 3 additions & 26 deletions

@@ -15,25 +15,13 @@ Compute the symbolic output of `nn` and differentiate it with respect to the par
 
 # Examples
 
-```jldoctest
+```julia
 using SymbolicNeuralNetworks: SymbolicNeuralNetwork, Gradient, derivative
 using AbstractNeuralNetworks
-using Latexify: latexify
 
 c = Chain(Dense(2, 1, tanh))
 nn = SymbolicNeuralNetwork(c)
-(Gradient(nn) |> derivative)[1].L1.b |> latexify
-
-# output
-
-L"\begin{equation}
-\left[
-\begin{array}{c}
-1 - \tanh^{2}\left( \mathtt{W\_2}_{1} + \mathtt{W\_1}_{1,1} \mathtt{sinput}_{1} + \mathtt{W\_1}_{1,2} \mathtt{sinput}_{2} \right) \\
-\end{array}
-\right]
-\end{equation}
-"
+(Gradient(nn) |> derivative)[1].L1.b
 ```
 
 # Implementation
@@ -87,7 +75,7 @@ This is used by [`Gradient`](@ref) and [`SymbolicPullback`](@ref).
 
 # Examples
 
-```jldoctest
+```julia
 using SymbolicNeuralNetworks: SymbolicNeuralNetwork, symbolic_pullback
 using AbstractNeuralNetworks
 using AbstractNeuralNetworks: params
@@ -100,17 +88,6 @@ output = c(nn.input, params(nn))
 spb = symbolic_pullback(output, nn)
 
 spb[1].L1.b |> latexify
-
-# output
-
-L"\begin{equation}
-\left[
-\begin{array}{c}
-1 - \tanh^{2}\left( \mathtt{W\_2}_{1} + \mathtt{W\_1}_{1,1} \mathtt{sinput}_{1} + \mathtt{W\_1}_{1,2} \mathtt{sinput}_{2} \right) \\
-\end{array}
-\right]
-\end{equation}
-"
 ```
 """
 function symbolic_pullback(soutput::EqT, nn::AbstractSymbolicNeuralNetwork)::Union{AbstractArray{<:Union{NamedTuple, NeuralNetworkParameters}}, Union{NamedTuple, NeuralNetworkParameters}}

src/derivatives/jacobian.jl

Lines changed: 2 additions & 2 deletions

@@ -45,7 +45,7 @@ We can use `Jacobian` together with [`build_nn_function`](@ref):
 ```jldoctest
 using SymbolicNeuralNetworks
 using SymbolicNeuralNetworks: Jacobian, derivative
-using AbstractNeuralNetworks: Dense, Chain, NeuralNetwork
+using AbstractNeuralNetworks: Dense, Chain, NeuralNetwork, params
 using Symbolics
 import Random
 
@@ -59,7 +59,7 @@ nn = SymbolicNeuralNetwork(c)
 □ = SymbolicNeuralNetworks.Jacobian(nn)
 # here we need to access the derivative and convert it into a function
 jacobian1 = build_nn_function(derivative(□), nn)
-ps = NeuralNetwork(c, Float64).params
+ps = params(NeuralNetwork(c, Float64))
 input = rand(input_dim)
 #derivative
 Dtanh(x::Real) = 4 * exp(2 * x) / (1 + exp(2x)) ^ 2

src/derivatives/pullback.jl

Lines changed: 4 additions & 1 deletion

@@ -108,8 +108,11 @@ SymbolicPullback(nn::SymbolicNeuralNetwork) = SymbolicPullback(nn, AbstractNeura
 Return the `NamedTuple` that's equivalent to the `NeuralNetworkParameters`.
 """
 _get_params(nt::NamedTuple) = nt
+function _get_params(nt::NamedTuple{(:params,), Tuple{AT}}) where {AT <: Any}
+    @warn "This function was most likely called because @adjoint for `NeuralNetworkParameters` hasn't been implemented."
+    nt.params
+end
 _get_params(ps::NeuralNetworkParameters) = params(ps)
-_get_params(ps::NamedTuple{(:params,), Tuple{NT}}) where {NT<:NamedTuple} = ps.params
 _get_params(ps::AbstractArray{<:Union{NamedTuple, NeuralNetworkParameters}}) = [_get_params(nt) for nt in ps]
 
 """
