Skip to content

Commit 42bbb2c

Browse files
authored
Merge pull request #191 from tpapp/tp/use-ohmythreads
Use OhMyThreads for threaded example. Incidental: use explicit RNGs everywhere.
2 parents f398bde + d776669 commit 42bbb2c

10 files changed

+39
-37
lines changed

docs/Project.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -3,9 +3,9 @@ Documenter = "e30172f5-a6a5-5a46-863b-614d45cd2de4"
33
LogDensityProblems = "6fdf6af0-433a-55f7-b3ed-c6c6e0b8df7c"
44
LogDensityProblemsAD = "996a588d-648d-4e1f-a8f0-a84b347e47b1"
55
MCMCDiagnosticTools = "be115224-59cd-429b-ad48-344e309966f0"
6+
OhMyThreads = "67456a42-1dca-4109-a031-0a68de7e3ad5"
67
Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c"
78
Statistics = "10745b16-79ce-11e8-11f9-7d13ad32a3b2"
8-
ThreadTools = "dbf13d8f-d36e-4350-8970-f3a99faba1a8"
99
TransformVariables = "84d833dd-6860-57f9-a1a7-6da5db126cff"
1010
TransformedLogDensities = "f9bc47f6-f3f8-4f3b-ab21-f8bc73906f26"
1111

docs/src/worked_example.md

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -94,13 +94,13 @@ summarize_tree_statistics(results.tree_statistics)
9494

9595
Usually one would run multiple chains and check convergence and mixing using generic MCMC diagnostics not specific to NUTS.
9696

97-
The specifics of running multiple chains is up to the user: various forms of [parallel computing](https://docs.julialang.org/en/v1/manual/parallel-computing/) can be utilized depending on the problem scale and the hardware available. In the example below we use [multi-threading](https://docs.julialang.org/en/v1/manual/multi-threading/), using [ThreadTools.jl](https://github.com/baggepinnen/ThreadTools.jl); other excellent packages are available for threading.
97+
The specifics of running multiple chains is up to the user: various forms of [parallel computing](https://docs.julialang.org/en/v1/manual/parallel-computing/) can be utilized depending on the problem scale and the hardware available. In the example below we use [multi-threading](https://docs.julialang.org/en/v1/manual/multi-threading/), using [OhMyThreads.jl](https://github.com/JuliaFolds2/OhMyThreads.jl); other excellent packages are available for threading.
9898

9999
It is easy to obtain posterior results for use with [MCMCDiagnosticTools.jl](https://github.com/TuringLang/MCMCDiagnosticTools.jl/) with [`stack_posterior_matrices`](@ref):
100100

101101
```@example bernoulli
102-
using ThreadTools, MCMCDiagnosticTools
103-
results5 = tmap1(_ -> mcmc_with_warmup(Random.default_rng(), ∇P, 1000; reporter = NoProgressReport()), 1:5)
102+
using OhMyThreads, MCMCDiagnosticTools
103+
results5 = tcollect(mcmc_with_warmup(Random.default_rng(), ∇P, 1000; reporter = NoProgressReport()) for _ in 1:5)
104104
ess_rhat(stack_posterior_matrices(results5))
105105
```
106106

test/runtests.jl

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -14,7 +14,7 @@ using LogDensityTestSuite
1414
### general test environment
1515
###
1616

17-
const RNG = Random.default_rng() # shorthand
17+
const RNG = copy(Random.default_rng()) # shorthand
1818
Random.seed!(RNG, UInt32[0x23ef614d, 0x8332e05c, 0x3c574111, 0x121aa2f4])
1919

2020
"Tolerant testing in a CI environment."

test/sample-correctness_tests.jl

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -11,9 +11,9 @@ const MCMC_ARGS2 = (warmup_stages = default_warmup_stages(; M = Symmetric),)
1111

1212
@testset "NUTS tests with random normal" begin
1313
for _ in 1:10
14-
K = rand(3:10)
15-
μ = randn(K)
16-
d = abs.(randn(K))
14+
K = rand(RNG, 3:10)
15+
μ = randn(RNG, K)
16+
d = abs.(randn(RNG, K))
1717
C = rand_C(K)
1818
ℓ = multivariate_normal(μ, Diagonal(d) * C)
1919
title = "multivariate normal μ = $(μ) d = $(d) C = $(C)"
@@ -94,7 +94,7 @@ end
9494
0.0 0.0 0.7434985947205197]
9595
ℓ2 = multivariate_normal(ones(3), D2 * C2)
9696
ℓ = mix(0.2, ℓ1, ℓ2)
97-
NUTS_tests(RNG, ℓ, "mixture of two normals", 1000; τ_alert = 0.15)
97+
NUTS_tests(RNG, ℓ, "mixture of two normals", 1000; τ_alert = 0.15, p_alert = 0.005)
9898
end
9999

100100
@testset "NUTS tests with heavier tails and skewness" begin

test/sample-correctness_utilities.jl

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -28,7 +28,7 @@ end
2828
"Random Cholesky factor for correlation matrix."
2929
function rand_C(K)
3030
t = TransformVariables.CorrCholeskyFactor(K)
31-
TransformVariables.transform(t, randn(TransformVariables.dimension(t)) ./ 4)'
31+
TransformVariables.transform(t, randn(RNG, TransformVariables.dimension(t)) ./ 4)'
3232
end
3333

3434
"""

test/test_NUTS.jl

Lines changed: 7 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,7 @@
11
using DynamicHMC: TrajectoryNUTS, rand_bool_logprob, GeneralizedTurnStatistic,
22
AcceptanceStatistic, leaf_acceptance_statistic, acceptance_rate, TreeStatisticsNUTS,
3-
NUTS, sample_tree, combine_turn_statistics, combine_visited_statistics
3+
NUTS, sample_tree, combine_turn_statistics, combine_visited_statistics, evaluate_ℓ,
4+
Hamiltonian
45

56
###
67
### random booleans
@@ -87,13 +88,13 @@ end
8788
# A test for sample_tree with a fixed ϵ and κ, which is perfectly adapted and should
8889
# provide excellent mixing
8990
for _ in 1:10
90-
K = rand(2:8)
91+
K = rand(RNG, 2:8)
9192
N = 10000
92-
μ = randn(K)
93+
μ = randn(RNG, K)
9394
Σ = rand_Σ(K)
9495
L = cholesky(Σ).L
9596
ℓ = multivariate_normal(μ, L)
96-
Q = evaluate_ℓ(ℓ, randn(K))
97+
Q = evaluate_ℓ(ℓ, randn(RNG, K))
9798
H = Hamiltonian(GaussianKineticEnergy(Σ), ℓ)
9899
qs = Array{Float64}(undef, N, K)
99100
ϵ = 0.5
@@ -103,7 +104,8 @@ end
103104
qs[i, :] = Q.q
104105
end
105106
m, C = mean_and_cov(qs, 1)
106-
@test vec(m) ≈ μ atol = 0.15 rtol = maximum(diag(C))*0.02 norm = x -> norm(x,1)
107+
tol = maximum(diag(C)) / 50
108+
@test vec(m) ≈ μ atol = tol rtol = tol norm = x -> norm(x,1)
107109
@test cov(qs, dims = 1) ≈ L*L' atol = 0.1 rtol = 0.1
108110
end
109111
end

test/test_diagnostics.jl

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -6,16 +6,16 @@
66
N = 1000
77
directions = Directions(UInt32(0))
88
function rand_invalidtree()
9-
if rand() < 0.1
9+
if rand(RNG) < 0.1
1010
REACHED_MAX_DEPTH
1111
else
12-
left = rand(-5:5)
13-
right = left + rand(0:5)
12+
left = rand(RNG, -5:5)
13+
right = left + rand(RNG, 0:5)
1414
InvalidTree(left, right)
1515
end
1616
end
17-
tree_statistics = [TreeStatisticsNUTS(randn(), rand(0:5), rand_invalidtree(), rand(),
18-
rand(1:30), directions) for _ in 1:N]
17+
tree_statistics = [TreeStatisticsNUTS(randn(RNG), rand(RNG, 0:5), rand_invalidtree(),
18+
rand(RNG), rand(RNG, 1:30), directions) for _ in 1:N]
1919
stats = summarize_tree_statistics(tree_statistics)
2020
# acceptance rates
2121
@test stats.N == N

test/test_hamiltonian.jl

Lines changed: 12 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -19,21 +19,21 @@ end
1919

2020
@testset "Gaussian KE full" begin
2121
for _ in 1:100
22-
K = rand(2:10)
22+
K = rand(RNG, 2:10)
2323
Σ = rand_Σ(Symmetric, K)
2424
κ = GaussianKineticEnergy(inv(Σ))
2525
(; M⁻¹, W) = κ
2626
@test W isa LowerTriangular
2727
@test M⁻¹ * W * W' ≈ Diagonal(ones(K))
2828
m, C = simulated_meancov(()->rand_p(RNG, κ), 10000)
2929
@test Matrix(Σ) ≈ C rtol = 0.1
30-
test_KE_gradient(κ, randn(K))
30+
test_KE_gradient(κ, randn(RNG, K))
3131
end
3232
end
3333

3434
@testset "Gaussian KE diagonal" begin
3535
for _ in 1:100
36-
K = rand(2:10)
36+
K = rand(RNG, 2:10)
3737
Σ = rand_Σ(Diagonal, K)
3838
κ = GaussianKineticEnergy(inv(Σ))
3939
(; M⁻¹, W) = κ
@@ -42,7 +42,7 @@ end
4242
@test M⁻¹ * (W * W') ≈ Diagonal(ones(K))
4343
m, C = simulated_meancov(()->rand_p(RNG, κ), 10000)
4444
@test Matrix(Σ) ≈ C rtol = 0.1
45-
test_KE_gradient(κ, randn(K))
45+
test_KE_gradient(κ, randn(RNG, K))
4646
end
4747
end
4848

@@ -57,7 +57,7 @@ end
5757
@test ℓ2 == ℓq
5858
@test ∇ℓ2 == ∇ℓq
5959
end
60-
(; H, z, Σ ) = rand_Hz(rand(3:10))
60+
(; H, z, Σ ) = rand_Hz(rand(RNG, 3:10))
6161
test_consistency(H, z)
6262
ϵ = find_stable_ϵ(H.κ, Σ)
6363
for _ in 1:10
@@ -81,10 +81,10 @@ end
8181
M = rand_Σ(Diagonal, n)
8282
m = diag(M)
8383
κ = GaussianKineticEnergy(inv(M))
84-
q = randn(n)
85-
p = randn(n)
84+
q = randn(RNG, n)
85+
p = randn(RNG, n)
8686
Σ = rand_Σ(n)
87-
ℓ = multivariate_normal(randn(n), cholesky(Σ).L)
87+
ℓ = multivariate_normal(randn(RNG, n), cholesky(Σ).L)
8888
H = Hamiltonian(κ, ℓ)
8989
ϵ = find_stable_ϵ(H.κ, Σ)
9090
z = PhasePoint(evaluate_ℓ(ℓ, q), p)
@@ -110,7 +110,7 @@ end
110110

111111
@testset "invalid values" begin
112112
n = 3
113-
ℓ = multivariate_normal(randn(n), I)
113+
ℓ = multivariate_normal(randn(RNG, n), I)
114114
@test_throws DynamicHMCError evaluate_ℓ(ℓ, fill(NaN, n))
115115
end
116116
end
@@ -134,7 +134,7 @@ end
134134
end
135135

136136
for _ in 1:100
137-
(; H, z) = rand_Hz(rand(2:5))
137+
(; H, z) = rand_Hz(rand(RNG, 2:5))
138138
ϵ = find_initial_stepsize(InitialStepsizeSearch(), local_log_acceptance_ratio(H, z))
139139
test_hamiltonian_invariance(H, z, 10, ϵ/100; atol = 0.5)
140140
end
@@ -226,7 +226,7 @@ function HMC_transition(H, z::PhasePoint, ϵ, L)
226226
z′ = leapfrog(H, z′, ϵ)
227227
end
228228
Δ = logdensity(H, z′) - π₀
229-
accept = Δ > 0 || (rand() < exp(Δ))
229+
accept = Δ > 0 || (rand(RNG) < exp(Δ))
230230
accept ? z′ : z
231231
end
232232

@@ -249,7 +249,7 @@ end
249249
# Tests the leapfrog and Hamiltonian code with HMC.
250250
K = 2
251251
ℓ = multivariate_normal(zeros(K), Diagonal(ones(K)))
252-
q = randn(K)
252+
q = randn(RNG, K)
253253
H = Hamiltonian(GaussianKineticEnergy(Diagonal(ones(K))), ℓ)
254254
qs = HMC_sample(H, q, 10000, find_stable_ϵ(H.κ, Diagonal(ones(K))) / 5)
255255
m, C = mean_and_cov(qs, 1)

test/test_stepsize.jl

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -30,7 +30,7 @@ $(SIGNATURES)
3030
A parametric random acceptance rate that depends on the stepsize. For unit
3131
testing acceptance rate tuning.
3232
"""
33-
dummy_acceptance_rate(ϵ, σ = 0.05) = min(1/ϵ * exp(randn()*σ - σ^2/2), 1)
33+
dummy_acceptance_rate(ϵ, σ = 0.05) = min(1/ϵ * exp(randn(RNG)*σ - σ^2/2), 1)
3434

3535
mean_dummy_acceptance_rate(ϵ, σ = 0.05) = mean(dummy_acceptance_rate(ϵ, σ) for _ in 1:10000)
3636

@@ -83,7 +83,7 @@ end
8383
p = InitialStepsizeSearch()
8484
_bkt(A, ϵ, C) = (A(ϵ) - p.log_threshold) * (A(ϵ * C) - p.log_threshold) ≥ 0
8585
for _ in 1:100
86-
(; H, z) = rand_Hz(rand(3:5))
86+
(; H, z) = rand_Hz(rand(RNG, 3:5))
8787
A = local_log_acceptance_ratio(H, z)
8888
ϵ = find_initial_stepsize(p, A)
8989
@test _bkt(A, ϵ, 0.5) || _bkt(A, ϵ, 2.0)

test/utilities.jl

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -4,11 +4,11 @@ $(SIGNATURES)
44
Random positive definite matrix of size `n` x `n` (for testing).
55
"""
66
function rand_Σ(::Type{Symmetric}, n)
7-
A = randn(n, n)
7+
A = randn(RNG, n, n)
88
Symmetric(A'*A .+ 0.01)
99
end
1010

11-
rand_Σ(::Type{Diagonal}, n) = Diagonal(randn(n).^2 .+ 0.01)
11+
rand_Σ(::Type{Diagonal}, n) = Diagonal(randn(RNG, n).^2 .+ 0.01)
1212

1313
rand_Σ(n::Int) = rand_Σ(Symmetric, n)
1414

@@ -31,7 +31,7 @@ end
3131
@testset "simulated meancov" begin
3232
μ = [2, 1.2]
3333
D = [2.0, 0.7]
34-
m, C = simulated_meancov(()-> randn(2) .* D .+ μ, 10000)
34+
m, C = simulated_meancov(()-> randn(RNG, 2) .* D .+ μ, 10000)
3535
@test m ≈ μ atol = 0.05 rtol = 0.1
3636
@test C ≈ Diagonal(abs2.(D)) atol = 0.05 rtol = 0.1
3737
end

0 commit comments

Comments
 (0)