refactor: use ReTest (#287)

* refactor: use ReTest

* fix: do not use include in @testset

* Remove seed for retest

* Try a different seed

* Add top level @testset to avoid scope issues (see the sketch after the change summary below)

* fix @testset scope

* move @testset into turing/runtests.jl

* pass h explicitly instead of using the global one

* only use ReTest for the AHMC part, not Turing

* Fix Comonicon issue

* Fix variable scope issue

* Move Pkg to correct place

* Put test_show into a @testset

* Remove skip-test @info as it is handled by ReTest now

* Move test skip inside @testset for better summary

* Improve test summary

* Improve @testset names

* Fix typo

* Fix variable scope

* Use local variable instead of global

* Improve test summary

* Add verbose=false to demo tests

* Add README for tests

* Fix Markdown syntax

* Disable CI for julia-nightly on Windows

Co-authored-by: Kai Xu <[email protected]>
xukai92 and Kai Xu authored Apr 9, 2022
1 parent 8cae9ac commit d5d4213
Showing 18 changed files with 618 additions and 594 deletions.
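
Several of the commit messages above describe the same ReTest-specific adjustments: test files switch from `using Test` to `using ReTest`, `include` calls stay outside `@testset` blocks, and helpers take their inputs (such as `h`) as explicit arguments instead of reading globals. The sketch below is a minimal, hypothetical illustration of that pattern, not code from this PR; the helper `squared_norm` and the testset names are invented.

```julia
# Hypothetical sketch of the ReTest pattern adopted in this PR (not actual PR code).
using ReTest   # ReTest also provides @test, so files only swap `Test` for `ReTest`

# Helper takes `h` explicitly, mirroring "pass h explicitly instead of using the global one".
squared_norm(h) = sum(abs2, h)

@testset "ReTest pattern" begin            # a top-level @testset groups everything
    @testset "explicit arguments" begin
        h = [1.0, 2.0]                     # local to the testset; no global state
        @test squared_norm(h) ≈ 5.0
    end
end

# ReTest collects @testset blocks and runs them on demand; a pattern argument
# restricts the run, which is what the `test_args` filtering builds on.
retest()                      # run every testset defined above
retest("explicit arguments")  # run only testsets whose description matches the pattern
```

Because `retest` filters by testset description, the commits that improve `@testset` names also make filtering and the printed summary more useful.
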
4 changes: 4 additions & 0 deletions .github/workflows/CI.yml
@@ -29,6 +29,10 @@ jobs:
arch: x86
- os: windows-latest
arch: x86
# GitHub Actions seems to have an issue running julia-nightly with windows-latest
# TODO Revisit in the future
- version: 'nightly'
os: windows-latest
steps:
- uses: actions/checkout@v2
- uses: julia-actions/setup-julia@v1
4 changes: 2 additions & 2 deletions test/Project.toml
@@ -2,17 +2,17 @@
AbstractMCMC = "80f14c24-f653-4e6a-9b94-39d6b0f70001"
Bijectors = "76274a88-744f-5084-9051-94815aaf08c4"
CUDA = "052768ef-5323-5732-b1bb-66c8b64840ba"
Comonicon = "863f3e99-da2a-4334-8734-de3dacbe5542"
ComponentArrays = "b0b7db55-cfe3-40fc-9ded-d10e2dbeff66"
Distributed = "8ba89e20-285c-5b6f-9357-94700520ee1b"
Distributions = "31c24e10-a181-5473-b8eb-7969acd0382f"
ForwardDiff = "f6369f11-7733-5829-9624-2563aa707210"
LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
OrdinaryDiffEq = "1dea7af3-3e70-54e6-95c3-0bf5283fa5ed"
Pkg = "44cfe95a-1eb2-52ea-b672-e2afdf69b78f"
Plots = "91a5bcdd-55d7-5caf-9e0b-520d859cae80"
Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c"
ReTest = "e0db7c4e-2690-44b9-bad6-7687da720f89"
Setfield = "efcf1570-3423-57d1-acb7-fd33fddbac46"
Statistics = "10745b16-79ce-11e8-11f9-7d13ad32a3b2"
Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
UnicodePlots = "b8865327-cd53-5732-bb35-84acbb429228"
Zygote = "e88e6eb3-aa80-5325-afca-941959d7151f"
19 changes: 19 additions & 0 deletions test/README.md
@@ -0,0 +1,19 @@
# How to run tests locally

Assuming you are in the root folder of AdvancedHMC.jl,
you can run all tests locally with the following command:

``` sh
julia --project=@. -e 'using Pkg; Pkg.test(; test_args=ARGS)'
```
If you are in a different folder,
change `@.` to the path of the AdvancedHMC.jl root.

Further, the test setup accepts positional arguments to filter and run only a subset of tests.
For example, the following runs only the `Adaptation` tests:

``` sh
julia --project=@. -e 'using Pkg; Pkg.test(; test_args=ARGS)' "Adaptation"
```

See [PR #287](https://github.com/TuringLang/AdvancedHMC.jl/pull/287) that introduces this functionality via [ReTest.jl](https://juliatesting.github.io/ReTest.jl/stable/) for more information.
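
As a rough sketch of how the positional `test_args` can reach ReTest.jl as filter patterns — this is an assumption about the wiring, not the actual `test/runtests.jl` added in this PR, and `adaptation.jl` below is just one placeholder file:

```julia
# Hypothetical runtests.jl-style wiring (the PR's actual setup may differ).
# Pkg.test(; test_args = ["Adaptation"]) surfaces the arguments here as ARGS.
using ReTest

include("adaptation.jl")   # assumption: each included test file defines ReTest @testset blocks

if isempty(ARGS)
    retest()          # no filter given: run every collected testset
else
    retest(ARGS...)   # run only testsets matching the given pattern(s), e.g. "Adaptation"
end
```

With a setup of this shape, `Pkg.test(; test_args = ["Adaptation"])` and the shell command in the README are two ways of passing the same filter.
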
4 changes: 2 additions & 2 deletions test/abstractmcmc.jl
@@ -1,8 +1,8 @@
using Test, Random, AdvancedHMC, ForwardDiff, AbstractMCMC
using ReTest, Random, AdvancedHMC, ForwardDiff, AbstractMCMC
using Statistics: mean
include("common.jl")

@testset "`gdemo`" begin
@testset "AbstractMCMC w/ gdemo" begin
rng = MersenneTwister(0)

n_samples = 5_000
227 changes: 114 additions & 113 deletions test/adaptation.jl
@@ -1,120 +1,121 @@
using Test, LinearAlgebra, Distributions, AdvancedHMC, Random, ForwardDiff
using ReTest, LinearAlgebra, Distributions, AdvancedHMC, Random, ForwardDiff
using AdvancedHMC.Adaptation: WelfordVar, NaiveVar, WelfordCov, NaiveCov, get_estimation, get_estimation, reset!

# Check that the estimated variance is approximately correct.
@testset "Online v.s. naive v.s. true var/cov estimation" begin
D = 10
T = Float64
sz = (D,)
n_samples = 100_000

var_welford = WelfordVar{T}(sz)
var_naive = NaiveVar{T}(sz)
var_estimators = [var_welford, var_naive]
cov_welford = WelfordCov{T}(sz)
cov_naive = NaiveCov{T}(sz)
cov_estimators = [cov_welford, cov_naive]
estimators = [var_estimators..., cov_estimators...]

for dist in [MvNormal(zeros(D), I), Dirichlet(D, 1)]
for _ = 1:n_samples
s = rand(dist)
for estimator in estimators
push!(estimator, s)
end
end
function runnuts(ℓπ, metric; n_samples=3_000)
D = size(metric, 1)
n_adapts = 1_500

@test get_estimation(var_welford) ≈ get_estimation(var_naive) atol=0.1D
for estimator in var_estimators
@test get_estimation(estimator) ≈ var(dist) atol=0.1D
end
θ_init = rand(D)

@test get_estimation(cov_welford) ≈ get_estimation(cov_naive) atol=0.1D^2
for estimator in cov_estimators
@test get_estimation(estimator) ≈ cov(dist) atol=0.1D^2
end

for estimator in estimators
reset!(estimator)
end
end
h = Hamiltonian(metric, ℓπ, ForwardDiff)
κ = NUTS(Leapfrog(find_good_stepsize(h, θ_init)))
adaptor = StanHMCAdaptor(
MassMatrixAdaptor(metric),
StepSizeAdaptor(0.8, κ.τ.integrator)
)
samples, stats = sample(h, κ, θ_init, n_samples, adaptor, n_adapts; verbose=false)
return (samples=samples, stats=stats, adaptor=adaptor)
end

@testset "MassMatrixAdaptor constructors" begin
θ = [0.0, 0.0, 0.0, 0.0]
pc1 = MassMatrixAdaptor(UnitEuclideanMetric) # default dim = 2
pc2 = MassMatrixAdaptor(DiagEuclideanMetric)
pc3 = MassMatrixAdaptor(DenseEuclideanMetric)

# Var adaptor dimension should be increased to length(θ) from 2
AdvancedHMC.adapt!(pc1, θ, 1.)
AdvancedHMC.adapt!(pc2, θ, 1.)
AdvancedHMC.adapt!(pc3, θ, 1.)
@test AdvancedHMC.Adaptation.getM⁻¹(pc2) == ones(length(θ))
@test AdvancedHMC.Adaptation.getM⁻¹(pc3) == LinearAlgebra.diagm(0 => ones(length(θ)))
end
@testset "Adaptation" begin
# Check that the estimated variance is approximately correct.
@testset "Online v.s. naive v.s. true var/cov estimation" begin
D = 10
T = Float64
sz = (D,)
n_samples = 100_000

var_welford = WelfordVar{T}(sz)
var_naive = NaiveVar{T}(sz)
var_estimators = [var_welford, var_naive]
cov_welford = WelfordCov{T}(sz)
cov_naive = NaiveCov{T}(sz)
cov_estimators = [cov_welford, cov_naive]
estimators = [var_estimators..., cov_estimators...]

for dist in [MvNormal(zeros(D), I), Dirichlet(D, 1)]
for _ = 1:n_samples
s = rand(dist)
for estimator in estimators
push!(estimator, s)
end
end

@testset "Stan HMC adaptors" begin
θ = [0.0, 0.0, 0.0, 0.0]
@test get_estimation(var_welford) ≈ get_estimation(var_naive) atol=0.1D
for estimator in var_estimators
@test get_estimation(estimator) ≈ var(dist) atol=0.1D
end

adaptor1 = StanHMCAdaptor(
MassMatrixAdaptor(UnitEuclideanMetric),
NesterovDualAveraging(0.8, 0.5)
)
adaptor2 = StanHMCAdaptor(
MassMatrixAdaptor(DiagEuclideanMetric),
NesterovDualAveraging(0.8, 0.5)
)
adaptor3 = StanHMCAdaptor(
MassMatrixAdaptor(DenseEuclideanMetric),
NesterovDualAveraging(0.8, 0.5)
)
for a in [adaptor1, adaptor2, adaptor3]
AdvancedHMC.initialize!(a, 1_000)
@test a.state.window_start == 76
@test a.state.window_end == 950
@test a.state.window_splits == [100, 150, 250, 450, 950]
AdvancedHMC.adapt!(a, θ, 1.)
end
@test AdvancedHMC.Adaptation.getM⁻¹(adaptor2) == ones(length(θ))
@test AdvancedHMC.Adaptation.getM⁻¹(adaptor3) == LinearAlgebra.diagm(0 => ones(length(θ)))
@test get_estimation(cov_welford) ≈ get_estimation(cov_naive) atol=0.1D^2
for estimator in cov_estimators
@test get_estimation(estimator) ≈ cov(dist) atol=0.1D^2
end

@test_deprecated StanHMCAdaptor(
1_000,
MassMatrixAdaptor(DiagEuclideanMetric),
NesterovDualAveraging(0.8, 0.5)
)
for estimator in estimators
reset!(estimator)
end
end
end

@testset "buffer > `n_adapts`" begin
AdvancedHMC.initialize!(
StanHMCAdaptor(
MassMatrixAdaptor(DenseEuclideanMetric),
NesterovDualAveraging(0.8, 0.5)
),
100
)
@testset "MassMatrixAdaptor constructors" begin
θ = [0.0, 0.0, 0.0, 0.0]
pc1 = MassMatrixAdaptor(UnitEuclideanMetric) # default dim = 2
pc2 = MassMatrixAdaptor(DiagEuclideanMetric)
pc3 = MassMatrixAdaptor(DenseEuclideanMetric)

# Var adaptor dimension should be increased to length(θ) from 2
AdvancedHMC.adapt!(pc1, θ, 1.)
AdvancedHMC.adapt!(pc2, θ, 1.)
AdvancedHMC.adapt!(pc3, θ, 1.)
@test AdvancedHMC.Adaptation.getM⁻¹(pc2) == ones(length(θ))
@test AdvancedHMC.Adaptation.getM⁻¹(pc3) == LinearAlgebra.diagm(0 => ones(length(θ)))
end
end

let D=10
function runnuts(ℓπ, metric; n_samples=3_000)
n_adapts = 1_500
@testset "Stan HMC adaptors" begin
θ = [0.0, 0.0, 0.0, 0.0]

θ_init = rand(D)
adaptor1 = StanHMCAdaptor(
MassMatrixAdaptor(UnitEuclideanMetric),
NesterovDualAveraging(0.8, 0.5)
)
adaptor2 = StanHMCAdaptor(
MassMatrixAdaptor(DiagEuclideanMetric),
NesterovDualAveraging(0.8, 0.5)
)
adaptor3 = StanHMCAdaptor(
MassMatrixAdaptor(DenseEuclideanMetric),
NesterovDualAveraging(0.8, 0.5)
)
for a in [adaptor1, adaptor2, adaptor3]
AdvancedHMC.initialize!(a, 1_000)
@test a.state.window_start == 76
@test a.state.window_end == 950
@test a.state.window_splits == [100, 150, 250, 450, 950]
AdvancedHMC.adapt!(a, θ, 1.)
end
@test AdvancedHMC.Adaptation.getM⁻¹(adaptor2) == ones(length(θ))
@test AdvancedHMC.Adaptation.getM⁻¹(adaptor3) == LinearAlgebra.diagm(0 => ones(length(θ)))

h = Hamiltonian(metric, ℓπ, ForwardDiff)
κ = NUTS(Leapfrog(find_good_stepsize(h, θ_init)))
adaptor = StanHMCAdaptor(
MassMatrixAdaptor(metric),
StepSizeAdaptor(0.8, κ.τ.integrator)
@test_deprecated StanHMCAdaptor(
1_000,
MassMatrixAdaptor(DiagEuclideanMetric),
NesterovDualAveraging(0.8, 0.5)
)
samples, stats = sample(h, κ, θ_init, n_samples, adaptor, n_adapts; verbose=false)
return (samples=samples, stats=stats, adaptor=adaptor)

@testset "buffer > `n_adapts`" begin
AdvancedHMC.initialize!(
StanHMCAdaptor(
MassMatrixAdaptor(DenseEuclideanMetric),
NesterovDualAveraging(0.8, 0.5)
),
100
)
end
end

@testset "Adapted mass v.s. true variance" begin
D = 10
n_tests = 5

@testset "DiagEuclideanMetric" begin
for _ in 1:n_tests
Random.seed!(1)
@@ -166,21 +167,21 @@ let D=10
res = runnuts(ℓπ, DenseEuclideanMetric(mass_init); n_samples=1)
@test res.adaptor.pc.cov == mass_init
end
end

@testset "Deprecation" begin
dim = 10
@test_deprecated Preconditioner(UnitEuclideanMetric(dim))
@test_deprecated Preconditioner(DiagEuclideanMetric(dim))
@test_deprecated Preconditioner(DenseEuclideanMetric(dim))
@test_deprecated Preconditioner(UnitEuclideanMetric)
@test_deprecated Preconditioner(DiagEuclideanMetric)
@test_deprecated Preconditioner(DenseEuclideanMetric)
for T in [Float32, Float64]
@test_deprecated Preconditioner(T, UnitEuclideanMetric)
@test_deprecated Preconditioner(T, DiagEuclideanMetric)
@test_deprecated Preconditioner(T, DenseEuclideanMetric)
@testset "Deprecation" begin
dim = 10
@test_deprecated Preconditioner(UnitEuclideanMetric(dim))
@test_deprecated Preconditioner(DiagEuclideanMetric(dim))
@test_deprecated Preconditioner(DenseEuclideanMetric(dim))
@test_deprecated Preconditioner(UnitEuclideanMetric)
@test_deprecated Preconditioner(DiagEuclideanMetric)
@test_deprecated Preconditioner(DenseEuclideanMetric)
for T in [Float32, Float64]
@test_deprecated Preconditioner(T, UnitEuclideanMetric)
@test_deprecated Preconditioner(T, DiagEuclideanMetric)
@test_deprecated Preconditioner(T, DenseEuclideanMetric)
end
@test_deprecated NesterovDualAveraging(0.8, Leapfrog(0.1))
@test_deprecated StanHMCAdaptor(100, MassMatrixAdaptor(UnitEuclideanMetric(dim)), StepSizeAdaptor(0.8, Leapfrog(0.1)))
end
@test_deprecated NesterovDualAveraging(0.8, Leapfrog(0.1))
@test_deprecated StanHMCAdaptor(100, MassMatrixAdaptor(UnitEuclideanMetric(dim)), StepSizeAdaptor(0.8, Leapfrog(0.1)))
end
7 changes: 2 additions & 5 deletions test/common.jl
@@ -13,9 +13,6 @@ const RNDATOL = 5e-2 * D * TRATIO * 2

# Hand-coded multivariate Gaussian

const gaussian_m = zeros(D)
const gaussian_s = ones(D)

struct Gaussian{Tm, Ts}
m::Tm
s::Ts
@@ -51,8 +48,8 @@ function get_∇ℓπ(g::Gaussian)
return ∇ℓπ
end

ℓπ = get_ℓπ(Gaussian(gaussian_m, gaussian_s))
∂ℓπ∂θ = get_∇ℓπ(Gaussian(gaussian_m, gaussian_s))
ℓπ = get_ℓπ(Gaussian(zeros(D), ones(D)))
∂ℓπ∂θ = get_∇ℓπ(Gaussian(zeros(D), ones(D)))

# For the Turing model
# @model gdemo() = begin
4 changes: 2 additions & 2 deletions test/contrib.jl
@@ -1,4 +1,4 @@
using Test, AdvancedHMC, ForwardDiff, Zygote
using ReTest, AdvancedHMC, ForwardDiff, Zygote

include("common.jl")

@@ -18,4 +18,4 @@ include("common.jl")
@test g_hand ≈ g_forwarddiff
end
end
end
end
4 changes: 2 additions & 2 deletions test/cuda.jl
@@ -1,4 +1,4 @@
using Test
using ReTest
using AdvancedHMC
using AdvancedHMC: DualValue, PhasePoint
using CUDA
@@ -48,4 +48,4 @@
@test z1.ℓπ.value == z2.ℓπ.value
@test z1.ℓκ.value == z2.ℓκ.value
end
end
end
6 changes: 3 additions & 3 deletions test/demo.jl
@@ -1,4 +1,4 @@
using Test
using ReTest
using AdvancedHMC, Distributions, ForwardDiff, ComponentArrays
using LinearAlgebra

@@ -30,7 +30,7 @@ using LinearAlgebra
# Run the sampler to draw samples from the specified Gaussian, where
# - `samples` will store the samples
# - `stats` will store diagnostic statistics for each sample
samples, stats = sample(hamiltonian, proposal, initial_θ, n_samples, adaptor, n_adapts; progress=true)
samples, stats = sample(hamiltonian, proposal, initial_θ, n_samples, adaptor, n_adapts; progress=false, verbose=false)

@test length(samples) == n_samples
@test length(stats) == n_samples
@@ -63,7 +63,7 @@ end
# -- run sampler
n_samples, n_adapts = 100, 50
samples, stats = sample(hamiltonian, proposal, p1, n_samples,
adaptor, n_adapts; progress=false)
adaptor, n_adapts; progress=false, verbose=false)

@test length(samples) == n_samples
@test length(stats) == n_samples
2 changes: 1 addition & 1 deletion test/hamiltonian.jl
@@ -1,4 +1,4 @@
using Test, AdvancedHMC
using ReTest, AdvancedHMC
using AdvancedHMC: DualValue, PhasePoint
using LinearAlgebra: dot, diagm
