From df0010de71d1399648c8c915384f3b0bebd53f92 Mon Sep 17 00:00:00 2001
From: st--
Date: Fri, 31 Dec 2021 16:07:42 +0100
Subject: [PATCH] fix Julia 1.7 tests (#85)

* Julia 1.7 test failure: increase rtol in fd vs ad grad to 1e-6
* Julia 1.7 test failure: move initial theta closer to optimum
* make test data rng-independent
* Julia 1.7 test failure: bump rtol for NelderMead
---
 src/laplace.jl  |  4 ++--
 test/laplace.jl | 23 ++++++++++++++---------
 2 files changed, 16 insertions(+), 11 deletions(-)

diff --git a/src/laplace.jl b/src/laplace.jl
index f28cb3b7..c6cfcfd0 100644
--- a/src/laplace.jl
+++ b/src/laplace.jl
@@ -216,7 +216,7 @@ end
 function _newton_inner_loop(dist_y_given_f, ys, K; f_init, maxiter, callback=nothing)
     @assert maxiter >= 1
     f = f_init
-    cache = nothing
+    local cache
     for i in 1:maxiter
         @debug "  - Newton iteration $i: f[1:3]=$(f[1:3])"
         fnew, cache = _newton_step(dist_y_given_f, ys, K, f)
@@ -232,7 +232,7 @@ function _newton_inner_loop(dist_y_given_f, ys, K; f_init, maxiter, callback=not
             f = fnew
         end
     end
-    return f, something(cache)
+    return f, cache
 end
 
 function ChainRulesCore.frule(Δargs, ::typeof(_newton_inner_loop), args...; kwargs...)
diff --git a/test/laplace.jl b/test/laplace.jl
index 49636350..5f781ada 100644
--- a/test/laplace.jl
+++ b/test/laplace.jl
@@ -1,12 +1,17 @@
 @testset "laplace" begin
     function generate_data()
-        Random.seed!(1)
         X = range(0, 23.5; length=48)
-        fs = @. 3 * sin(10 + 0.6X) + sin(0.1X) - 1
-        # invlink = normcdf
-        invlink = logistic
-        ps = invlink.(fs)
-        Y = [rand(Bernoulli(p)) for p in ps]
+        # The random number generator changed in 1.6->1.7. The following vector was generated in Julia 1.6.
+        # The generating code below is only kept for illustrative purposes.
+        #! format: off
+        Y = [0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0]
+        #! format: on
+        # Random.seed!(1)
+        # fs = @. 3 * sin(10 + 0.6X) + sin(0.1X) - 1
+        # # invlink = normcdf
+        # invlink = logistic
+        # ps = invlink.(fs)
+        # Y = [rand(Bernoulli(p)) for p in ps]
         return X, Y
     end
 
@@ -96,7 +101,7 @@
 
         fd_grad = only(FiniteDifferences.grad(central_fdm(5, 1), objective, theta0))
         ad_grad = only(Zygote.gradient(objective, theta0))
-        @test ad_grad ≈ fd_grad
+        @test ad_grad ≈ fd_grad rtol = 1e-6
     end
 
     @testset "_newton_inner_loop derivatives not defined" begin
@@ -185,7 +190,7 @@
 
     @testset "optimization" begin
         X, Y = generate_data()
-        theta0 = [0.0, 1.0]
+        theta0 = [5.0, 1.0]
 
         @testset "reference optimum" begin
            function objective(theta)
@@ -199,7 +204,7 @@
 
            res = Optim.optimize(objective, theta0, NelderMead())
            #@info res
-           @test res.minimizer ≈ expected_thetahat
+           @test res.minimizer ≈ expected_thetahat rtol = 1e-4
        end
 
        @testset "gradient-based" begin
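
A note on the src/laplace.jl change (not part of the patch itself): replacing `cache = nothing` with `local cache` declares the variable without initialising it, which presumably avoids widening its type to `Union{Nothing, ...}` and makes the `something(cache)` unwrap after the loop unnecessary; the `@assert maxiter >= 1` guarantees the loop body assigns it at least once. A minimal sketch of the same scoping pattern, using a hypothetical `last_even` helper:

    # `local result` declares the variable in the function's scope without
    # assigning it; the assignment inside the `for` body then targets this
    # enclosing local rather than creating a loop-local variable.
    function last_even(xs)
        local result
        for x in xs
            if iseven(x)
                result = x  # still visible after the loop ends
            end
        end
        return result  # throws UndefVarError if the loop never assigned it
    end

    last_even([1, 2, 3, 4])  # == 4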
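
On the `rtol` changes: a trailing `key = value` in `@test` is passed as a keyword argument to the comparison call, so `@test ad_grad ≈ fd_grad rtol = 1e-6` is equivalent to `@test isapprox(ad_grad, fd_grad; rtol=1e-6)`. For `Float64`, `isapprox` defaults to `rtol = sqrt(eps()) ≈ 1.5e-8` (when `atol` is zero), so `1e-6` loosens the tolerance enough to absorb finite-differencing noise. A small self-contained illustration:

    using Test

    # The two forms below are equivalent; the trailing keyword is forwarded
    # to `isapprox`. A difference of 2e-7 is within rtol = 1e-6 but would
    # fail under the Float64 default rtol of sqrt(eps()) ≈ 1.5e-8.
    @test 1.0 ≈ 1.0 + 2e-7 rtol = 1e-6
    @test isapprox(1.0, 1.0 + 2e-7; rtol=1e-6)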