
Commit 4bd6cb2

Adjusted docstrings and docs to new ANN interface.
1 parent 4d9cde2 commit 4bd6cb2
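
The change applied across all eight files is the same: parameter initialization previously went through `initialparameters` (sometimes piped into `NeuralNetworkParameters`), and now goes through the `params` field of a `NeuralNetwork`. A minimal sketch of the migration, assuming the `Chain`/`Dense` setup used in the docstrings below:

```julia
using AbstractNeuralNetworks: Chain, Dense, NeuralNetwork

c = Chain(Dense(2, 1, tanh))

# old interface (removed by this commit):
#   ps = initialparameters(c, Float64) |> NeuralNetworkParameters
# new interface: construct a NeuralNetwork and read its params field
ps = NeuralNetwork(c, Float64).params
```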

8 files changed (+18 −18 lines)

docs/src/hamiltonian_neural_network.md (2 additions, 2 deletions)

@@ -5,7 +5,7 @@ Here we build a Hamiltonian neural network as a symbolic neural network.
 ```julia hnn
 using SymbolicNeuralNetworks
 using GeometricMachineLearning
-using AbstractNeuralNetworks: Dense, initialparameters, UnknownArchitecture, Model
+using AbstractNeuralNetworks: Dense, UnknownArchitecture, Model
 using LinearAlgebra: norm
 using ChainRulesCore
 using KernelAbstractions
@@ -45,7 +45,7 @@ nothing # hide
 We can now train the network:
 
 ```julia hnn
-ps = NeuralNetworkParameters(initialparameters(c, T))
+ps = NeuralNetwork(c, T).params
 dl = DataLoader(z_data, hvf_analytic(z_data))
 o = Optimizer(AdamOptimizer(.01), ps)
 batch = Batch(200)

docs/src/symbolic_neural_networks.md (1 addition, 1 deletion)

@@ -6,7 +6,7 @@ We first call the symbolic neural network that only consists of one layer:
 
 ```@example snn
 using SymbolicNeuralNetworks
-using AbstractNeuralNetworks: Chain, Dense, initialparameters
+using AbstractNeuralNetworks: Chain, Dense
 
 input_dim = 2
 output_dim = 1

src/derivatives/jacobian.jl (2 additions, 2 deletions)

@@ -45,7 +45,7 @@ We can use `Jacobian` together with [`build_nn_function`](@ref):
 ```jldoctest
 using SymbolicNeuralNetworks
 using SymbolicNeuralNetworks: Jacobian, derivative
-using AbstractNeuralNetworks: Dense, Chain, initialparameters
+using AbstractNeuralNetworks: Dense, Chain, NeuralNetwork
 using Symbolics
 import Random
 
@@ -59,7 +59,7 @@ nn = SymbolicNeuralNetwork(c)
 □ = SymbolicNeuralNetworks.Jacobian(nn)
 # here we need to access the derivative and convert it into a function
 jacobian1 = build_nn_function(derivative(□), nn)
-ps = initialparameters(c, Float64)
+ps = NeuralNetwork(c, Float64).params
 input = rand(input_dim)
 #derivative
 Dtanh(x::Real) = 4 * exp(2 * x) / (1 + exp(2x)) ^ 2

src/pullback.jl (2 additions, 2 deletions)

@@ -15,7 +15,7 @@ c = Chain(Dense(2, 1, tanh))
 nn = SymbolicNeuralNetwork(c)
 loss = FeedForwardLoss()
 pb = SymbolicPullback(nn, loss)
-ps = initialparameters(c) |> NeuralNetworkParameters
+ps = NeuralNetwork(c).params
 pv_values = pb(ps, nn.model, (rand(2), rand(1)))[2](1) |> typeof
 
 # output
@@ -50,7 +50,7 @@ c = Chain(Dense(2, 1, tanh))
 nn = SymbolicNeuralNetwork(c)
 loss = FeedForwardLoss()
 pb = SymbolicPullback(nn, loss)
-ps = initialparameters(c) |> NeuralNetworkParameters
+ps = NeuralNetwork(c).params
 input_output = (rand(2), rand(1))
 loss_and_pullback = pb(ps, nn.model, input_output)
 pv_values = loss_and_pullback[2](1)

src/utils/build_function.jl (1 addition, 1 deletion)

@@ -48,7 +48,7 @@ params = symbolicparameters(c)
 @variables sinput[1:2]
 eq = c(sinput, params)
 built_function = _build_nn_function(eq, params, sinput)
-ps = initialparameters(c)
+ps = NeuralNetwork(c).params
 input = rand(2, 2)
 
 (built_function(input, ps, 1), built_function(input, ps, 2)) .≈ (c(input[:, 1], ps), c(input[:, 2], ps))

src/utils/build_function_arrays.jl (6 additions, 6 deletions)

@@ -7,7 +7,7 @@ Build an executable function based on `eqs` that potentially also has a symbolic
 
 ```jldoctest
 using SymbolicNeuralNetworks: build_nn_function, SymbolicNeuralNetwork
-using AbstractNeuralNetworks: Chain, Dense, initialparameters, NeuralNetworkParameters
+using AbstractNeuralNetworks: Chain, Dense, NeuralNetwork
 import Random
 Random.seed!(123)
 
@@ -16,7 +16,7 @@ nn = SymbolicNeuralNetwork(ch)
 eqs = [(a = ch(nn.input, nn.params), b = ch(nn.input, nn.params).^2), (c = ch(nn.input, nn.params).^3, )]
 funcs = build_nn_function(eqs, nn.params, nn.input)
 input = [1., 2.]
-ps = initialparameters(ch) |> NeuralNetworkParameters
+ps = NeuralNetwork(ch).params
 a = ch(input, ps)
 b = ch(input, ps).^2
 c = ch(input, ps).^3
@@ -47,7 +47,7 @@ Return a function that takes an input, (optionally) an output and neural network
 
 ```jldoctest
 using SymbolicNeuralNetworks: build_nn_function, SymbolicNeuralNetwork
-using AbstractNeuralNetworks: Chain, Dense, initialparameters, NeuralNetworkParameters
+using AbstractNeuralNetworks: Chain, Dense, NeuralNetwork
 import Random
 Random.seed!(123)
 
@@ -56,7 +56,7 @@ nn = SymbolicNeuralNetwork(c)
 eqs = (a = c(nn.input, nn.params), b = c(nn.input, nn.params).^2)
 funcs = build_nn_function(eqs, nn.params, nn.input)
 input = [1., 2.]
-ps = initialparameters(c) |> NeuralNetworkParameters
+ps = NeuralNetwork(c).params
 a = c(input, ps)
 b = c(input, ps).^2
 funcs_evaluated = funcs(input, ps)
@@ -90,7 +90,7 @@ Return an executable function for each entry in `eqs`. This still has to be proc
 
 ```jldoctest
 using SymbolicNeuralNetworks: function_valued_parameters, SymbolicNeuralNetwork
-using AbstractNeuralNetworks: Chain, Dense, initialparameters, NeuralNetworkParameters
+using AbstractNeuralNetworks: Chain, Dense, NeuralNetwork
 import Random
 Random.seed!(123)
 
@@ -99,7 +99,7 @@ nn = SymbolicNeuralNetwork(c)
 eqs = (a = c(nn.input, nn.params), b = c(nn.input, nn.params).^2)
 funcs = function_valued_parameters(eqs, nn.params, nn.input)
 input = [1., 2.]
-ps = initialparameters(c) |> NeuralNetworkParameters
+ps = NeuralNetwork(c).params
 a = c(input, ps)
 b = c(input, ps).^2
 

test/neural_network_derivative.jl (2 additions, 2 deletions)

@@ -1,6 +1,6 @@
 using Test, SymbolicNeuralNetworks
 using SymbolicNeuralNetworks: Jacobian, derivative
-using AbstractNeuralNetworks: Chain, Dense, initialparameters, NeuralNetworkParameters
+using AbstractNeuralNetworks: Chain, Dense, NeuralNetwork
 using LinearAlgebra: norm
 import Symbolics, Random, ForwardDiff
 
@@ -26,7 +26,7 @@ function test_jacobian(n::Integer, T = Float32)
 nn = SymbolicNeuralNetwork(c)
 g = Jacobian(nn)
 
-params = initialparameters(c, T) |> NeuralNetworkParameters
+params = NeuralNetwork(c, T).params
 input = rand(T, n)
 @test build_nn_function(g.output, nn)(input, params) ≈ c(input, params)
 @test build_nn_function(derivative(g), nn)(input, params) ≈ ForwardDiff.jacobian(input -> c(input, params), input)

test/symbolic_gradient.jl (2 additions, 2 deletions)

@@ -15,7 +15,7 @@ function test_symbolic_gradient(input_dim::Integer = 3, output_dim::Integer = 1,
 @assert second_dim > 1 "second_dim must be greater than 1!"
 c = Chain(Dense(input_dim, hidden_dim, tanh), Dense(hidden_dim, output_dim, tanh))
 sparams = symbolicparameters(c)
-ps = initialparameters(c, T) |> NeuralNetworkParameters
+ps = NeuralNetwork(c, T).params
 @variables sinput[1:input_dim]
 sout = norm(c(sinput, sparams)) ^ 2
 sdparams = symbolic_differentials(sparams)
@@ -40,7 +40,7 @@ Also checks the parallelization, but for the full function.
 function test_symbolic_gradient2(input_dim::Integer = 3, output_dim::Integer = 1, hidden_dim::Integer = 2, T::DataType = Float64, second_dim::Integer = 1, third_dim::Integer = 1)
 c = Chain(Dense(input_dim, hidden_dim, tanh), Dense(hidden_dim, output_dim, tanh))
 sparams = symbolicparameters(c)
-ps = initialparameters(c, T) |> NeuralNetworkParameters
+ps = NeuralNetwork(c, T).params
 @variables sinput[1:input_dim]
 sout = norm(c(sinput, sparams)) ^ 2
 input = rand(T, input_dim, second_dim, third_dim)
