From 150a35a94a12b7e2d734f1336ade5ccb1dc43a22 Mon Sep 17 00:00:00 2001
From: Tor Erlend Fjelde
Date: Fri, 26 Apr 2024 01:38:20 +0100
Subject: [PATCH] Backport of some features to v0.2 (#56)

* added Turing integration tests

* added weakdeps

* fixed Project.toml

* added extensions

* moved to usage of extensions + ADTypes.jl

* added test toml

* added Flux and Enzyme as weakdeps

* added Enzyme ext

* fixed accidental includes

* fix requires and disable Enzyme tests

* bump patch version

* fixed Requires usage

* another Project.toml fix

* more toml fixing

* maybe now
---
 .github/workflows/IntegrationTest.yml |  50 ++++++++++
 Project.toml                          |  33 +++++--
 ext/AdvancedVIEnzymeExt.jl            |  42 +++++++
 ext/AdvancedVIFluxExt.jl              |  13 +++
 ext/AdvancedVIReverseDiffExt.jl       |  40 ++++++++
 ext/AdvancedVIZygoteExt.jl            |  39 ++++++++
 src/AdvancedVI.jl                     | 130 +++++++-------------
 src/ad.jl                             |  16 ++--
 src/advi.jl                           |   6 +-
 test/Project.toml                     |  18 ++++
 test/runtests.jl                      |  50 ++++++----
 11 files changed, 306 insertions(+), 131 deletions(-)
 create mode 100644 .github/workflows/IntegrationTest.yml
 create mode 100644 ext/AdvancedVIEnzymeExt.jl
 create mode 100644 ext/AdvancedVIFluxExt.jl
 create mode 100644 ext/AdvancedVIReverseDiffExt.jl
 create mode 100644 ext/AdvancedVIZygoteExt.jl
 create mode 100644 test/Project.toml

diff --git a/.github/workflows/IntegrationTest.yml b/.github/workflows/IntegrationTest.yml
new file mode 100644
index 00000000..fec37f7c
--- /dev/null
+++ b/.github/workflows/IntegrationTest.yml
@@ -0,0 +1,50 @@
+name: IntegrationTest
+
+on:
+  push:
+    branches:
+      - master
+  merge_group:
+    types: [checks_requested]
+  pull_request:
+    branches: [v0.2-backport]
+
+jobs:
+  test:
+    name: ${{ matrix.package.repo }}
+    runs-on: ubuntu-latest
+    strategy:
+      fail-fast: false
+      matrix:
+        package:
+          - {user: TuringLang, repo: Turing.jl}
+
+    steps:
+      - uses: actions/checkout@v2
+      - uses: julia-actions/setup-julia@v1
+        with:
+          version: 1
+          arch: x64
+      - uses: julia-actions/julia-buildpkg@latest
+      - name: Clone Downstream
+        uses: actions/checkout@v2
+        with:
+          repository: ${{ matrix.package.user }}/${{ matrix.package.repo }}
+          path: downstream
+      - name: Load this and run the downstream tests
+        shell: julia --color=yes --project=downstream {0}
+        run: |
+          using Pkg
+          try
+            # force it to use this PR's version of the package
+            Pkg.develop(PackageSpec(path=".")) # resolver may fail with main deps
+            Pkg.update()
+            Pkg.test(julia_args=["--depwarn=no"]) # resolver may fail with test time deps
+          catch err
+            err isa Pkg.Resolve.ResolverError || rethrow()
+            # If we can't resolve that means this is incompatible by SemVer and this is fine
+            # It means we marked this as a breaking change, so we don't need to worry about
+            # Mistakenly introducing a breaking change, as we have intentionally made one
+            @info "Not compatible with this release. No problem." exception=err
+            exit(0) # Exit immediately, as a success
+          end
diff --git a/Project.toml b/Project.toml
index 800fa34f..98e50b16 100644
--- a/Project.toml
+++ b/Project.toml
@@ -1,9 +1,11 @@
 name = "AdvancedVI"
 uuid = "b5ca4192-6429-45e5-a2d9-87aec30a685c"
-version = "0.2.4"
+version = "0.2.5"
 
 [deps]
+ADTypes = "47edcb42-4c32-4615-8424-f2b9edc5f35b"
 Bijectors = "76274a88-744f-5084-9051-94815aaf08c4"
+DiffResults = "163ba53b-c6d8-5494-b064-1a9d43ac40c5"
 Distributions = "31c24e10-a181-5473-b8eb-7969acd0382f"
 DistributionsAD = "ced4e74d-a319-5a8a-b0ac-84af2272839c"
 DocStringExtensions = "ffbed154-4ef7-542d-bbb7-c09d3a79fcae"
@@ -16,22 +18,39 @@ StatsBase = "2913bbd2-ae8a-5f71-8c99-4fb6c76f3a91"
 StatsFuns = "4c63d2b9-4356-54db-8cca-17b64c39e42c"
 Tracker = "9f7883ad-71c0-57eb-9f7f-b5c9e6d3789c"
 
+[weakdeps]
+Enzyme = "7da242da-08ed-463a-9acd-ee780be4f1d9"
+Flux = "587475ba-b771-5e3f-ad9e-33799f191a9c"
+ReverseDiff = "37e2e3b7-166d-5795-8a7a-e32c996b4267"
+Zygote = "e88e6eb3-aa80-5325-afca-941959d7151f"
+
+[extensions]
+AdvancedVIEnzymeExt = ["Enzyme"]
+AdvancedVIFluxExt = ["Flux"]
+AdvancedVIReverseDiffExt = ["ReverseDiff"]
+AdvancedVIZygoteExt = ["Zygote"]
+
 [compat]
 Bijectors = "0.11, 0.12, 0.13"
 Distributions = "0.21, 0.22, 0.23, 0.24, 0.25"
 DistributionsAD = "0.2, 0.3, 0.4, 0.5, 0.6"
 DocStringExtensions = "0.8, 0.9"
+Enzyme = "0.12"
+LinearAlgebra = "1.6"
 ForwardDiff = "0.10.3"
+Flux = "0.14"
 ProgressMeter = "1.0.0"
-Requires = "0.5, 1.0"
+Random = "1.6"
+Requires = "1"
+ReverseDiff = "1"
 StatsBase = "0.32, 0.33, 0.34"
 StatsFuns = "0.8, 0.9, 1"
 Tracker = "0.2.3"
+Zygote = "0.6"
 julia = "1.6"
 
 [extras]
-Pkg = "44cfe95a-1eb2-52ea-b672-e2afdf69b78f"
-Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
-
-[targets]
-test = ["Pkg", "Test"]
+Enzyme = "7da242da-08ed-463a-9acd-ee780be4f1d9"
+Flux = "587475ba-b771-5e3f-ad9e-33799f191a9c"
+ReverseDiff = "37e2e3b7-166d-5795-8a7a-e32c996b4267"
+Zygote = "e88e6eb3-aa80-5325-afca-941959d7151f"
diff --git a/ext/AdvancedVIEnzymeExt.jl b/ext/AdvancedVIEnzymeExt.jl
new file mode 100644
index 00000000..025c3901
--- /dev/null
+++ b/ext/AdvancedVIEnzymeExt.jl
@@ -0,0 +1,42 @@
+module AdvancedVIEnzymeExt
+
+if isdefined(Base, :get_extension)
+    using AdvancedVI: AdvancedVI, ADTypes, DiffResults, Distributions
+    using Enzyme: Enzyme
+else
+    using ..AdvancedVI: AdvancedVI, ADTypes, DiffResults, Distributions
+    using ..Enzyme: Enzyme
+end
+
+AdvancedVI.ADBackend(::Val{:enzyme}) = ADTypes.AutoEnzyme()
+function AdvancedVI.setadbackend(::Val{:enzyme})
+    Base.depwarn("`setadbackend` is deprecated. Please pass a `ADTypes.AbstractADType` as a keyword argument to the VI algorithm.", :setadbackend)
+    AdvancedVI.ADBACKEND[] = :enzyme
+end
+
+function AdvancedVI.grad!(
+    vo,
+    alg::AdvancedVI.VariationalInference{<:ADTypes.AutoEnzyme},
+    q,
+    model,
+    θ::AbstractVector{<:Real},
+    out::DiffResults.MutableDiffResult,
+    args...
+)
+    f(θ) =
+        if (q isa Distributions.Distribution)
+            -vo(alg, AdvancedVI.update(q, θ), model, args...)
+        else
+            -vo(alg, q(θ), model, args...)
+        end
+    # Use `Enzyme.ReverseWithPrimal` once it is released:
+    # https://github.com/EnzymeAD/Enzyme.jl/pull/598
+    y = f(θ)
+    DiffResults.value!(out, y)
+    dy = DiffResults.gradient(out)
+    fill!(dy, 0)
+    Enzyme.autodiff(Enzyme.ReverseWithPrimal, f, Enzyme.Active, Enzyme.Duplicated(θ, dy))
+    return out
+end
+
+end
diff --git a/ext/AdvancedVIFluxExt.jl b/ext/AdvancedVIFluxExt.jl
new file mode 100644
index 00000000..d87cb23e
--- /dev/null
+++ b/ext/AdvancedVIFluxExt.jl
@@ -0,0 +1,13 @@
+module AdvancedVIFluxExt
+
+if isdefined(Base, :get_extension)
+    using AdvancedVI: AdvancedVI
+    using Flux: Flux
+else
+    using ..AdvancedVI: AdvancedVI
+    using ..Flux: Flux
+end
+
+AdvancedVI.apply!(o::Flux.Optimise.AbstractOptimiser, x, Δ) = Flux.Optimise.apply!(o, x, Δ)
+
+end
diff --git a/ext/AdvancedVIReverseDiffExt.jl b/ext/AdvancedVIReverseDiffExt.jl
new file mode 100644
index 00000000..dbe0d108
--- /dev/null
+++ b/ext/AdvancedVIReverseDiffExt.jl
@@ -0,0 +1,40 @@
+module AdvancedVIReverseDiffExt
+
+if isdefined(Base, :get_extension)
+    using AdvancedVI: AdvancedVI, ADTypes, DiffResults, Distributions
+    using ReverseDiff: ReverseDiff
+else
+    using ..AdvancedVI: AdvancedVI, ADTypes, DiffResults, Distributions
+    using ..ReverseDiff: ReverseDiff
+end
+
+AdvancedVI.ADBackend(::Val{:reversediff}) = ADTypes.AutoReverseDiff()
+
+function AdvancedVI.setadbackend(::Val{:reversediff})
+    Base.depwarn("`setadbackend` is deprecated. Please pass a `ADTypes.AbstractADType` as a keyword argument to the VI algorithm.", :setadbackend)
+    AdvancedVI.ADBACKEND[] = :reversediff
+end
+
+tape(f, x) = ReverseDiff.GradientTape(f, x)
+
+function AdvancedVI.grad!(
+    vo,
+    alg::AdvancedVI.VariationalInference{<:ADTypes.AutoReverseDiff},
+    q,
+    model,
+    θ::AbstractVector{<:Real},
+    out::DiffResults.MutableDiffResult,
+    args...
+)
+    f(θ) =
+        if (q isa Distributions.Distribution)
+            -vo(alg, AdvancedVI.update(q, θ), model, args...)
+        else
+            -vo(alg, q(θ), model, args...)
+        end
+    tp = tape(f, θ)
+    ReverseDiff.gradient!(out, tp, θ)
+    return out
+end
+
+end
diff --git a/ext/AdvancedVIZygoteExt.jl b/ext/AdvancedVIZygoteExt.jl
new file mode 100644
index 00000000..efb27732
--- /dev/null
+++ b/ext/AdvancedVIZygoteExt.jl
@@ -0,0 +1,39 @@
+module AdvancedVIZygoteExt
+
+if isdefined(Base, :get_extension)
+    using AdvancedVI: AdvancedVI, ADTypes, DiffResults, Distributions
+    using Zygote: Zygote
+else
+    using ..AdvancedVI: AdvancedVI, ADTypes, DiffResults, Distributions
+    using ..Zygote: Zygote
+end
+
+AdvancedVI.ADBackend(::Val{:zygote}) = ADTypes.AutoZygote()
+function AdvancedVI.setadbackend(::Val{:zygote})
+    Base.depwarn("`setadbackend` is deprecated. Please pass a `ADTypes.AbstractADType` as a keyword argument to the VI algorithm.", :setadbackend)
+    AdvancedVI.ADBACKEND[] = :zygote
+end
+
+function AdvancedVI.grad!(
+    vo,
+    alg::AdvancedVI.VariationalInference{<:ADTypes.AutoZygote},
+    q,
+    model,
+    θ::AbstractVector{<:Real},
+    out::DiffResults.MutableDiffResult,
+    args...
+)
+    f(θ) =
+        if (q isa Distributions.Distribution)
+            -vo(alg, AdvancedVI.update(q, θ), model, args...)
+        else
+            -vo(alg, q(θ), model, args...)
+        end
+    y, back = Zygote.pullback(f, θ)
+    dy = first(back(1.0))
+    DiffResults.value!(out, y)
+    DiffResults.gradient!(out, dy)
+    return out
+end
+
+end
diff --git a/src/AdvancedVI.jl b/src/AdvancedVI.jl
index e203a13c..59ae0e24 100644
--- a/src/AdvancedVI.jl
+++ b/src/AdvancedVI.jl
@@ -7,8 +7,11 @@ using DocStringExtensions
 
 using ProgressMeter, LinearAlgebra
 
-using ForwardDiff
-using Tracker
+using ADTypes: ADTypes
+using DiffResults: DiffResults
+
+using ForwardDiff: ForwardDiff
+using Tracker: Tracker
 
 const PROGRESS = Ref(true)
 function turnprogress(switch::Bool)
@@ -18,94 +21,6 @@ end
 
 const DEBUG = Bool(parse(Int, get(ENV, "DEBUG_ADVANCEDVI", "0")))
 
-include("ad.jl")
-include("utils.jl")
-
-using Requires
-function __init__()
-    @require Flux="587475ba-b771-5e3f-ad9e-33799f191a9c" begin
-        apply!(o, x, Δ) = Flux.Optimise.apply!(o, x, Δ)
-        Flux.Optimise.apply!(o::TruncatedADAGrad, x, Δ) = apply!(o, x, Δ)
-        Flux.Optimise.apply!(o::DecayedADAGrad, x, Δ) = apply!(o, x, Δ)
-    end
-    @require Zygote = "e88e6eb3-aa80-5325-afca-941959d7151f" begin
-        include("compat/zygote.jl")
-        export ZygoteAD
-
-        function AdvancedVI.grad!(
-            vo,
-            alg::VariationalInference{<:AdvancedVI.ZygoteAD},
-            q,
-            model,
-            θ::AbstractVector{<:Real},
-            out::DiffResults.MutableDiffResult,
-            args...
-        )
-            f(θ) = if (q isa Distribution)
-                - vo(alg, update(q, θ), model, args...)
-            else
-                - vo(alg, q(θ), model, args...)
-            end
-            y, back = Zygote.pullback(f, θ)
-            dy = first(back(1.0))
-            DiffResults.value!(out, y)
-            DiffResults.gradient!(out, dy)
-            return out
-        end
-    end
-    @require ReverseDiff = "37e2e3b7-166d-5795-8a7a-e32c996b4267" begin
-        include("compat/reversediff.jl")
-        export ReverseDiffAD
-
-        function AdvancedVI.grad!(
-            vo,
-            alg::VariationalInference{<:AdvancedVI.ReverseDiffAD{false}},
-            q,
-            model,
-            θ::AbstractVector{<:Real},
-            out::DiffResults.MutableDiffResult,
-            args...
-        )
-            f(θ) = if (q isa Distribution)
-                - vo(alg, update(q, θ), model, args...)
-            else
-                - vo(alg, q(θ), model, args...)
-            end
-            tp = AdvancedVI.tape(f, θ)
-            ReverseDiff.gradient!(out, tp, θ)
-            return out
-        end
-    end
-    @require Enzyme = "7da242da-08ed-463a-9acd-ee780be4f1d9" begin
-        include("compat/enzyme.jl")
-        export EnzymeAD
-
-        function AdvancedVI.grad!(
-            vo,
-            alg::VariationalInference{<:AdvancedVI.EnzymeAD},
-            q,
-            model,
-            θ::AbstractVector{<:Real},
-            out::DiffResults.MutableDiffResult,
-            args...
-        )
-            f(θ) = if (q isa Distribution)
-                - vo(alg, update(q, θ), model, args...)
-            else
-                - vo(alg, q(θ), model, args...)
-            end
-            # Use `Enzyme.ReverseWithPrimal` once it is released:
-            # https://github.com/EnzymeAD/Enzyme.jl/pull/598
-            y = f(θ)
-            DiffResults.value!(out, y)
-            dy = DiffResults.gradient(out)
-            fill!(dy, 0)
-            Enzyme.autodiff(Enzyme.ReverseWithPrimal, f, Enzyme.Active, Enzyme.Duplicated(θ, dy))
-            return out
-        end
-    end
-end
-
 export
     vi,
     ADVI,
@@ -115,10 +30,12 @@ export
     DecayedADAGrad,
     VariationalInference
 
+include("utils.jl")
+include("ad.jl")
+
 abstract type VariationalInference{AD} end
 
-getchunksize(::Type{<:VariationalInference{AD}}) where AD = getchunksize(AD)
-getADtype(::VariationalInference{AD}) where AD = AD
+getchunksize(::ADTypes.AutoForwardDiff{chunk}) where chunk = chunk === nothing ? 0 : chunk
 
 abstract type VariationalObjective end
 
@@ -129,7 +46,7 @@ const VariationalPosterior = Distribution{Multivariate, Continuous}
     grad!(vo, alg::VariationalInference, q, model::Model, θ, out, args...)
 
 Computes the gradients used in `optimize!`. Default implementation is provided for
-`VariationalInference{AD}` where `AD` is either `ForwardDiffAD` or `TrackerAD`.
+`VariationalInference{AD}` where `AD` is either `ADTypes.AutoForwardDiff` or `ADTypes.AutoTracker`.
 This implicitly also gives a default implementation of `optimize!`.
 
 Variance reduction techniques, e.g. control variates, should be implemented in this function.
@@ -158,7 +75,7 @@ function update end
 # default implementations
 function grad!(
     vo,
-    alg::VariationalInference{<:ForwardDiffAD},
+    alg::VariationalInference{<:ADTypes.AutoForwardDiff},
     q,
     model,
     θ::AbstractVector{<:Real},
@@ -172,7 +89,7 @@ function grad!(
     end
 
     # Set chunk size and do ForwardMode.
-    chunk_size = getchunksize(typeof(alg))
+    chunk_size = getchunksize(alg.adtype)
     config = if chunk_size == 0
         ForwardDiff.GradientConfig(f, θ)
     else
@@ -183,7 +100,7 @@ end
 
 function grad!(
     vo,
-    alg::VariationalInference{<:TrackerAD},
+    alg::VariationalInference{<:ADTypes.AutoTracker},
     q,
     model,
     θ::AbstractVector{<:Real},
@@ -267,4 +184,25 @@ include("optimisers.jl")
 # VI algorithms
 include("advi.jl")
 
+if !isdefined(Base, :get_extension)
+    using Requires
+end
+
+@static if !isdefined(Base, :get_extension)
+    function __init__()
+        @require ReverseDiff = "37e2e3b7-166d-5795-8a7a-e32c996b4267" include(
+            "../ext/AdvancedVIReverseDiffExt.jl"
+        )
+        @require Zygote = "e88e6eb3-aa80-5325-afca-941959d7151f" include(
+            "../ext/AdvancedVIZygoteExt.jl"
+        )
+        @require Enzyme = "7da242da-08ed-463a-9acd-ee780be4f1d9" include(
+            "../ext/AdvancedVIEnzymeExt.jl"
+        )
+        @require Flux = "587475ba-b771-5e3f-ad9e-33799f191a9c" include(
+            "../ext/AdvancedVIFluxExt.jl"
+        )
+    end
+end
+
 end # module
diff --git a/src/ad.jl b/src/ad.jl
index 62e785e1..d40f5f50 100644
--- a/src/ad.jl
+++ b/src/ad.jl
@@ -1,6 +1,4 @@
-##############################
-# Global variables/constants #
-##############################
+# FIXME: All this should go away.
 const ADBACKEND = Ref(:forwarddiff)
 setadbackend(backend_sym::Symbol) = setadbackend(Val(backend_sym))
 function setadbackend(::Val{:forward_diff})
@@ -8,6 +6,7 @@ function setadbackend(::Val{:forward_diff})
     setadbackend(Val(:forwarddiff))
 end
 function setadbackend(::Val{:forwarddiff})
+    Base.depwarn("`setadbackend` is deprecated. Please pass a `ADTypes.AbstractADType` as a keyword argument to the VI algorithm.", :setadbackend)
     ADBACKEND[] = :forwarddiff
 end
 
@@ -16,6 +15,7 @@ function setadbackend(::Val{:reverse_diff})
     setadbackend(Val(:tracker))
 end
 function setadbackend(::Val{:tracker})
+    Base.depwarn("`setadbackend` is deprecated. Please pass a `ADTypes.AbstractADType` as a keyword argument to the VI algorithm.", :setadbackend)
     ADBACKEND[] = :tracker
 end
 
@@ -32,15 +32,11 @@ function setchunksize(chunk_size::Int)
     CHUNKSIZE[] = chunk_size
 end
 
-abstract type ADBackend end
-struct ForwardDiffAD{chunk} <: ADBackend end
-getchunksize(::Type{<:ForwardDiffAD{chunk}}) where chunk = chunk
-
-struct TrackerAD <: ADBackend end
+getchunksize(::Type{<:ADTypes.AutoForwardDiff{chunk}}) where chunk = chunk
 
 ADBackend() = ADBackend(ADBACKEND[])
 ADBackend(T::Symbol) = ADBackend(Val(T))
-ADBackend(::Val{:forwarddiff}) = ForwardDiffAD{CHUNKSIZE[]}
-ADBackend(::Val{:tracker}) = TrackerAD
+ADBackend(::Val{:forwarddiff}) = ADTypes.AutoForwardDiff(chunksize=CHUNKSIZE[])
+ADBackend(::Val{:tracker}) = ADTypes.AutoTracker()
 ADBackend(::Val) = error("The requested AD backend is not available. Make sure to load all required packages.")
 
diff --git a/src/advi.jl b/src/advi.jl
index 7f9e7346..61733547 100644
--- a/src/advi.jl
+++ b/src/advi.jl
@@ -19,10 +19,12 @@ struct ADVI{AD} <: VariationalInference{AD}
     samples_per_step::Int
     "Maximum number of gradient steps."
     max_iters::Int
+    "AD backend used for automatic differentiation."
+    adtype::AD
 end
 
-function ADVI(samples_per_step::Int=1, max_iters::Int=1000)
-    return ADVI{ADBackend()}(samples_per_step, max_iters)
+function ADVI(samples_per_step::Int=1, max_iters::Int=1000; adtype::ADTypes.AbstractADType=ADTypes.AutoForwardDiff())
+    return ADVI(samples_per_step, max_iters, adtype)
 end
 
 alg_str(::ADVI) = "ADVI"
diff --git a/test/Project.toml b/test/Project.toml
new file mode 100644
index 00000000..6221471c
--- /dev/null
+++ b/test/Project.toml
@@ -0,0 +1,18 @@
+[deps]
+ADTypes = "47edcb42-4c32-4615-8424-f2b9edc5f35b"
+Distributions = "31c24e10-a181-5473-b8eb-7969acd0382f"
+DistributionsAD = "ced4e74d-a319-5a8a-b0ac-84af2272839c"
+Enzyme = "7da242da-08ed-463a-9acd-ee780be4f1d9"
+Flux = "587475ba-b771-5e3f-ad9e-33799f191a9c"
+ForwardDiff = "f6369f11-7733-5829-9624-2563aa707210"
+LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
+Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c"
+ReverseDiff = "37e2e3b7-166d-5795-8a7a-e32c996b4267"
+Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
+Tracker = "9f7883ad-71c0-57eb-9f7f-b5c9e6d3789c"
+Zygote = "e88e6eb3-aa80-5325-afca-941959d7151f"
+
+[compat]
+Distributions = "0.21, 0.22, 0.23, 0.24, 0.25"
+DistributionsAD = "0.2, 0.3, 0.4, 0.5, 0.6"
+ForwardDiff = "0.10.3"
diff --git a/test/runtests.jl b/test/runtests.jl
index a305c25e..71a611e0 100644
--- a/test/runtests.jl
+++ b/test/runtests.jl
@@ -1,28 +1,46 @@
 using Test
 using Distributions, DistributionsAD
+using ADTypes
+using ForwardDiff: ForwardDiff
+using ReverseDiff: ReverseDiff
+using Tracker: Tracker
+using Zygote: Zygote
+using Enzyme: Enzyme
+Enzyme.API.runtimeActivity!(true);
+Enzyme.API.typeWarning!(false);
+
 using AdvancedVI
 
+function AdvancedVI.update(q::TuringDiagMvNormal, θ::AbstractArray{<:Real})
+    return TuringDiagMvNormal(θ[1:length(q)], exp.(θ[length(q)+1:end]))
+end
+
 include("optimisers.jl")
 
-target = MvNormal(ones(2))
-logπ(z) = logpdf(target, z)
-advi = ADVI(10, 1000)
+@testset "$adtype" for adtype in [
+    AutoForwardDiff(),
+    AutoReverseDiff(),
+    AutoTracker(),
+    AutoZygote(),
+    # AutoEnzyme() # results in incorrect result
+]
+    target = MvNormal(ones(2))
+    logπ(z) = logpdf(target, z)
+    advi = ADVI(10, 1000; adtype)
 
-# Using a function z ↦ q(⋅∣z)
-getq(θ) = TuringDiagMvNormal(θ[1:2], exp.(θ[3:4]))
-q = vi(logπ, advi, getq, randn(4))
+    # Using a function z ↦ q(⋅∣z)
+    getq(θ) = TuringDiagMvNormal(θ[1:2], exp.(θ[3:4]))
+    q = vi(logπ, advi, getq, randn(4))
 
-xs = rand(target, 10)
-@test mean(abs2, logpdf(q, xs) - logpdf(target, xs)) ≤ 0.05
+    xs = rand(target, 10)
+    @test mean(abs2, logpdf(q, xs) - logpdf(target, xs)) ≤ 0.05
 
-# OR: implement `update` and pass a `Distribution`
-function AdvancedVI.update(d::TuringDiagMvNormal, θ::AbstractArray{<:Real})
-    return TuringDiagMvNormal(θ[1:length(q)], exp.(θ[length(q) + 1:end]))
-end
+    # OR: implement `update` and pass a `Distribution`
+    q0 = TuringDiagMvNormal(zeros(2), ones(2))
 
-q0 = TuringDiagMvNormal(zeros(2), ones(2))
-q = vi(logπ, advi, q0, randn(4))
+    q = vi(logπ, advi, q0, randn(4))
 
-xs = rand(target, 10)
-@test mean(abs2, logpdf(q, xs) - logpdf(target, xs)) ≤ 0.05
+    xs = rand(target, 10)
+    @test mean(abs2, logpdf(q, xs) - logpdf(target, xs)) ≤ 0.05
+end
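
For reference, a minimal usage sketch of the backported interface, mirroring the updated test/runtests.jl above. The choice of AutoReverseDiff() is purely illustrative; any of the backends wired up by this patch can be passed the same way (the test suite above exercises ForwardDiff, ReverseDiff, Tracker, and Zygote).

    # Assumes this patch is applied; names follow test/runtests.jl above.
    using Distributions, DistributionsAD
    using ADTypes
    using ReverseDiff: ReverseDiff  # loading the backend package activates the extension
    using AdvancedVI

    # Toy target and a parameterisation θ ↦ q(⋅∣θ), exactly as in the tests.
    target = MvNormal(ones(2))
    logπ(z) = logpdf(target, z)
    getq(θ) = TuringDiagMvNormal(θ[1:2], exp.(θ[3:4]))

    # The AD backend is now selected via the `adtype` keyword instead of `setadbackend`.
    advi = ADVI(10, 1000; adtype=AutoReverseDiff())
    q = vi(logπ, advi, getq, randn(4))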