
Commit d7a3bb5

documentation update (#2422)
* update docs
* update docs
* update docs
* fixes for mlp tutorial
* compat
* blitz update and model zoo page
1 parent 90a3dfe commit d7a3bb5

40 files changed: +449 / -612 lines changed

Project.toml

Lines changed: 4 additions & 5 deletions
@@ -7,6 +7,8 @@ Adapt = "79e6a3ab-5dfb-504d-930d-738a2a938a0e"
 ChainRulesCore = "d360d2e6-b24c-11e9-a2a3-2a2ae2dbcce4"
 Compat = "34da2185-b29b-5c13-b0c7-acf172513d20"
 Functors = "d9f16b24-f501-4c13-a1f2-28368ffc5196"
+ImageCore = "a09fc81d-aa75-5fe9-8630-4744c3626534"
+ImageShow = "4e3cecfd-b093-5904-9786-8bbb286a6a31"
 LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
 MLUtils = "f1d291b0-491e-4a28-83b9-f70985020b54"
 MacroTools = "1914dd2f-81c6-5fcd-8719-6d5c9610ff09"
@@ -48,7 +50,7 @@ MacroTools = "0.5"
 Metal = "0.5, 1"
 NNlib = "0.9.1"
 OneHotArrays = "0.2.4"
-Optimisers = "0.3.2"
+Optimisers = "0.3.3"
 Preferences = "1"
 ProgressLogging = "0.1"
 Reexport = "1.0"
@@ -77,7 +79,4 @@ Tracker = "9f7883ad-71c0-57eb-9f7f-b5c9e6d3789c"
 cuDNN = "02a925ec-e4fe-4b08-9a7e-0d78e3d38ccd"
 
 [targets]
-test = ["Test", "Documenter", "IterTools", "LinearAlgebra", "FillArrays",
-        "ComponentArrays", "BSON", "Pkg", "CUDA", "cuDNN", "Metal", "AMDGPU",
-        "Enzyme", "FiniteDifferences", "Tracker"]
-
+test = ["Test", "Documenter", "IterTools", "LinearAlgebra", "FillArrays", "ComponentArrays", "BSON", "Pkg", "CUDA", "cuDNN", "Metal", "AMDGPU", "Enzyme", "FiniteDifferences", "Tracker"]

docs/Project.toml

Lines changed: 1 addition & 1 deletion
@@ -16,4 +16,4 @@ Statistics = "10745b16-79ce-11e8-11f9-7d13ad32a3b2"
 Zygote = "e88e6eb3-aa80-5325-afca-941959d7151f"
 
 [compat]
-Documenter = "0.27"
+Documenter = "1.3"

docs/make.jl

Lines changed: 30 additions & 39 deletions
@@ -13,48 +13,48 @@ makedocs(
             # You could read this end-to-end, or skip to what you need.
             # Aim is to cover each new concept exactly once (but not list all variants).
             # Hard to invent further divisions which aren't more confusing than helpful?
-            "Quick Start" => "models/quickstart.md",
-            "Fitting a Line" => "models/overview.md",
-            "Gradients and Layers" => "models/basics.md",
-            "Training" => "training/training.md",
-            "Recurrence" => "models/recurrence.md",
-            "GPU Support" => "gpu.md",
-            "Saving & Loading" => "saving.md",
-            "Performance Tips" => "performance.md",
+            "Quick Start" => "guide/models/quickstart.md",
+            "Fitting a Line" => "guide/models/overview.md",
+            "Gradients and Layers" => "guide/models/basics.md",
+            "Custom Layers" => "guide/models/custom_layers.md",
+            "Training" => "guide/training/training.md",
+            "Recurrence" => "guide/models/recurrence.md",
+            "GPU Support" => "guide/gpu.md",
+            "Saving & Loading" => "guide/saving.md",
+            "Performance Tips" => "guide/performance.md",
         ],
         "Ecosystem" => "ecosystem.md",
         "Reference" => [
             # This essentially collects docstrings, with a bit of introduction.
-            "Built-in Layers" => "models/layers.md",
-            "Activation Functions" => "models/activation.md",
-            "Weight Initialisation" => "utilities.md",
-            "Loss Functions" => "models/losses.md",
-            "Training API" => "training/reference.md",
-            "Optimisation Rules" => "training/optimisers.md",
-            "Shape Inference" => "outputsize.md",
-            "Flat vs. Nested" => "destructure.md",
-            "Callback Helpers" => "training/callbacks.md",
-            "Gradients -- Zygote.jl" => "training/zygote.md",
-            "Batching Data -- MLUtils.jl" => "data/mlutils.md",
-            "OneHotArrays.jl" => "data/onehot.md",
-            "Low-level Operations -- NNlib.jl" => "models/nnlib.md",
-            "Nested Structures -- Functors.jl" => "models/functors.md",
+            "Built-in Layers" => "reference/models/layers.md",
+            "Activation Functions" => "reference/models/activation.md",
+            "Weight Initialisation" => "reference/utilities.md",
+            "Loss Functions" => "reference/models/losses.md",
+            "Training API" => "reference/training/reference.md",
+            "Optimisation Rules" => "reference/training/optimisers.md",
+            "Shape Inference" => "reference/outputsize.md",
+            "Flat vs. Nested" => "reference/destructure.md",
+            "Callback Helpers" => "reference/training/callbacks.md",
+            "Gradients -- Zygote.jl" => "reference/training/zygote.md",
+            "Batching Data -- MLUtils.jl" => "reference/data/mlutils.md",
+            "OneHotArrays.jl" => "reference/data/onehot.md",
+            "Low-level Operations -- NNlib.jl" => "reference/models/nnlib.md",
+            "Nested Structures -- Functors.jl" => "reference/models/functors.md",
         ],
         "Tutorials" => [
             # These walk you through various tasks. It's fine if they overlap quite a lot.
             # All the website tutorials can move here, perhaps much of the model zoo too?
             # Or perhaps those should just be trashed, model zoo versions are newer & more useful.
             "Linear Regression" => "tutorials/linear_regression.md",
             "Logistic Regression" => "tutorials/logistic_regression.md",
+            "Model Zoo" => "tutorials/model_zoo.md",
             #=
-            "Julia & Flux: 60 Minute Blitz" => "tutorials/2020-09-15-deep-learning-flux.md",
-            "Multi-layer Perceptron" => "tutorials/2021-01-26-mlp.md",
+            # "Multi-layer Perceptron" => "tutorials/mlp.md",
+            # "Julia & Flux: 60 Minute Blitz" => "tutorials/blitz.md",
             "Simple ConvNet" => "tutorials/2021-02-07-convnet.md",
             "Generative Adversarial Net" => "tutorials/2021-10-14-vanilla-gan.md",
             "Deep Convolutional GAN" => "tutorials/2021-10-08-dcgan-mnist.md",
             =#
-            # Not really sure where this belongs... some in Fluxperimental, aim to delete?
-            "Custom Layers" => "models/advanced.md", # TODO move freezing to Training
         ],
     ],
     format = Documenter.HTML(
@@ -63,19 +63,10 @@ makedocs(
         assets = ["assets/flux.css"],
         prettyurls = get(ENV, "CI", nothing) == "true"
     ),
-    doctest = false,
-    # linkcheck = true,
-    checkdocs = :exports,
-    # strict = true,
-    # strict = [
-    #     :cross_references,
-    #     :missing_docs,
-    #     :doctest,
-    #     :linkcheck,
-    #     :parse_error,
-    #     :example_block,
-    #     :autodocs_block, :docs_block, :eval_block, :example_block, :footnote, :meta_block, :setup_block
-    # ],
+    doctest = false, # done later
+    checkdocs = :none, # :exports # Do not check if all functions appear in the docs
+                       # since it considers all packages
+    warnonly = [:cross_references]
 )
 
 doctest(Flux) # only test Flux modules
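Condensed, the build options this commit settles on look roughly like the sketch below; `sitename` and the omitted `pages`/`format` arguments are placeholders, not the file's actual values:

```julia
using Documenter, Flux

makedocs(;
    sitename = "Flux",                  # placeholder; the real call also sets pages, format, etc.
    doctest = false,                    # doctests are run separately, below
    checkdocs = :none,                  # don't require every exported name to appear in the docs
    warnonly = [:cross_references],     # report broken cross-references as warnings, not errors
)

doctest(Flux)  # run doctests for Flux's own modules only
```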

docs/src/tutorials/2021-10-08-dcgan-mnist.md renamed to docs/old_tutorials/2021-10-08-dcgan-mnist.md

Lines changed: 1 addition & 1 deletion
@@ -109,7 +109,7 @@ dcgan_init(shape...) = randn(Float32, shape) * 0.02f0
 ```julia
 function Generator(latent_dim)
     Chain(
-        Dense(latent_dim, 7*7*256, bias=false),
+        Dense(latent_dim => 7*7*256, bias=false),
         BatchNorm(7*7*256, relu),
 
         x -> reshape(x, 7, 7, 256, :),
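The only substantive change here is the constructor spelling, `Dense(in => out)` instead of `Dense(in, out)`. A small illustrative check (the `96` below is a made-up stand-in for `latent_dim`):

```julia
using Flux

layer = Dense(96 => 7*7*256, bias=false)  # 96 stands in for latent_dim
size(layer.weight)                        # (12544, 96): out × in, same layer as before
```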

docs/src/tutorials/2021-10-14-vanilla-gan.md renamed to docs/old_tutorials/2021-10-14-vanilla-gan.md

Lines changed: 7 additions & 7 deletions
@@ -96,13 +96,13 @@ calling the model in a gradient context. As a final non-linearity, we use the
 `sigmoid` activation function.
 
 ```julia
-discriminator = Chain(Dense(n_features, 1024, x -> leakyrelu(x, 0.2f0)),
+discriminator = Chain(Dense(n_features => 1024, x -> leakyrelu(x, 0.2f0)),
                       Dropout(0.3),
-                      Dense(1024, 512, x -> leakyrelu(x, 0.2f0)),
+                      Dense(1024 => 512, x -> leakyrelu(x, 0.2f0)),
                       Dropout(0.3),
-                      Dense(512, 256, x -> leakyrelu(x, 0.2f0)),
+                      Dense(512 => 256, x -> leakyrelu(x, 0.2f0)),
                       Dropout(0.3),
-                      Dense(256, 1, sigmoid)) |> gpu
+                      Dense(256 => 1, sigmoid)) |> gpu
 ```
 
 Let's define the generator in a similar fashion. This network maps a latent
@@ -113,9 +113,9 @@ the training data onto.
 
 ```julia
 generator = Chain(Dense(latent_dim, 256, x -> leakyrelu(x, 0.2f0)),
-                  Dense(256, 512, x -> leakyrelu(x, 0.2f0)),
-                  Dense(512, 1024, x -> leakyrelu(x, 0.2f0)),
-                  Dense(1024, n_features, tanh)) |> gpu
+                  Dense(256 => 512, x -> leakyrelu(x, 0.2f0)),
+                  Dense(512 => 1024, x -> leakyrelu(x, 0.2f0)),
+                  Dense(1024 => n_features, tanh)) |> gpu
 ```
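As in the DCGAN tutorial, these are purely syntactic `Dense(in => out)` updates. A hedged usage sketch with invented sizes (`n_features = 28*28`, a batch of 16) showing the shapes flowing through a shortened discriminator:

```julia
using Flux

n_features = 28 * 28
# Shortened stand-in for the tutorial's discriminator, using the pair syntax.
disc = Chain(Dense(n_features => 1024, x -> leakyrelu(x, 0.2f0)),
             Dropout(0.3),
             Dense(1024 => 1, sigmoid))

x = randn(Float32, n_features, 16)  # 16 flattened 28×28 samples
size(disc(x))                       # (1, 16): one real/fake score per sample
```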