Skip to content

Commit

Permalink
Drop Turing test dep (#267)
Browse files Browse the repository at this point in the history
* Remove Turing from Project

* Tidy up testing

* Bump DynamicPPL dep

* Remove ReverseDiff from test deps

* Swap out Turing for DynamicPPL + Distributions in benchmarking

* Bump patch
  • Loading branch information
willtebbutt authored Sep 26, 2024
1 parent b31cb44 commit f4e3ca2
Show file tree
Hide file tree
Showing 4 changed files with 28 additions and 91 deletions.
9 changes: 3 additions & 6 deletions Project.toml
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
name = "Mooncake"
uuid = "da2b9cff-9c12-43a0-ae48-6db2b0edb7d6"
authors = ["Will Tebbutt, Hong Ge, and contributors"]
version = "0.4.2"

[deps]
ADTypes = "47edcb42-4c32-4615-8424-f2b9edc5f35b"
Expand Down Expand Up @@ -40,7 +40,7 @@ DiffRules = "1"
DiffTests = "0.1"
Distributions = "0.25"
Documenter = "1"
DynamicPPL = "0.29"
ExprTools = "0.1"
FillArrays = "1"
Graphs = "1"
Expand All @@ -52,7 +52,6 @@ Setfield = "1"
SpecialFunctions = "2"
StableRNGs = "1"
TemporalGPs = "0.6"
Turing = "0.34"
julia = "1.10"

[extras]
Expand All @@ -68,12 +67,10 @@ JET = "c3a54625-cd67-489e-a8e7-0a5a0ff4e31b"
KernelFunctions = "ec8451be-7e33-11e9-00cf-bbf324bd1392"
LogDensityProblemsAD = "996a588d-648d-4e1f-a8f0-a84b347e47b1"
PDMats = "90014a1f-27ba-587c-ab20-58faa44d9150"
ReverseDiff = "37e2e3b7-166d-5795-8a7a-e32c996b4267"
SpecialFunctions = "276daf66-3868-5448-9aa4-cd146d93841b"
StableRNGs = "860ef19b-820b-49d6-a774-d7a799459cd3"
TemporalGPs = "e155a3c4-0841-43e1-8b83-a0e4f03cc18f"
Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
Turing = "fce5fe82-541a-59a6-adf8-730c64b5f9a0"

[targets]
test = ["AbstractGPs", "BenchmarkTools", "CUDA", "DiffTests", "Distributions", "Documenter", "DynamicPPL", "FillArrays", "KernelFunctions", "JET", "LogDensityProblemsAD", "PDMats", "SpecialFunctions", "StableRNGs", "Test", "TemporalGPs"]
5 changes: 3 additions & 2 deletions bench/Project.toml
Original file line number Diff line number Diff line change
Expand Up @@ -3,14 +3,15 @@ AbstractGPs = "99985d1d-32ba-4be9-9821-2ec096f28918"
CSV = "336ed68f-0bac-5ca0-87d4-7b16caf5d00b"
Chairmarks = "0ca39b1e-fe0b-4e98-acfc-b1656634c4de"
DataFrames = "a93c6f00-e57d-5684-b7b6-d8193f3e46c0"
Distributions = "31c24e10-a181-5473-b8eb-7969acd0382f"
DynamicPPL = "366bfd00-2699-11ea-058f-f148b4cae6d8"
Enzyme = "7da242da-08ed-463a-9acd-ee780be4f1d9"
KernelFunctions = "ec8451be-7e33-11e9-00cf-bbf324bd1392"
Mooncake = "da2b9cff-9c12-43a0-ae48-6db2b0edb7d6"
Plots = "91a5bcdd-55d7-5caf-9e0b-520d859cae80"
PrettyTables = "08abe8d2-0d0c-5749-adfa-8a2ac140af0d"
Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c"
ReverseDiff = "37e2e3b7-166d-5795-8a7a-e32c996b4267"
Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
Turing = "fce5fe82-541a-59a6-adf8-730c64b5f9a0"
UnicodePlots = "b8865327-cd53-5732-bb35-84acbb429228"
Zygote = "e88e6eb3-aa80-5325-afca-941959d7151f"
19 changes: 10 additions & 9 deletions bench/run_benchmarks.jl
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,8 @@ using
Chairmarks,
CSV,
DataFrames,
Distributions,
DynamicPPL,
Enzyme,
KernelFunctions,
LinearAlgebra,
Expand All @@ -15,7 +17,6 @@ using
ReverseDiff,
Mooncake,
Test,
Turing,
Zygote

using Mooncake:
Expand Down Expand Up @@ -106,22 +107,22 @@ end
"""
    build_turing_problem()

Construct a log-density benchmark problem from the `broadcast_demo` model
using DynamicPPL directly (Turing is no longer a dependency of this repo).

Returns a tuple `(test_function, x0)`, where `test_function` maps a linked
parameter vector to its log density, and `x0` is a random starting point of
the correct dimension.
"""
function build_turing_problem()
    rng = Xoshiro(123)
    # 100_000 observations makes the broadcast in the model non-trivial.
    model = broadcast_demo(rand(LogNormal(1.5, 0.5), 100_000))
    ctx = DynamicPPL.DefaultContext()
    vi = DynamicPPL.SimpleVarInfo(model)
    # Work in the linked (unconstrained) parametrisation.
    vi_linked = DynamicPPL.link(vi, model)
    ldp = DynamicPPL.LogDensityFunction(vi_linked, model, ctx)
    test_function = Base.Fix1(DynamicPPL.LogDensityProblems.logdensity, ldp)
    d = DynamicPPL.LogDensityProblems.dimension(ldp)
    return test_function, randn(rng, d)
end

run_turing_problem(f::F, x::X) where {F, X} = f(x)

# Zygote and Enzyme cannot currently handle the DynamicPPL log-density
# problem, so exclude it from their benchmark runs. (The stale Turing-based
# method signatures from before the DynamicPPL swap are dropped here.)
should_run_benchmark(
    ::Val{:zygote}, ::Base.Fix1{<:typeof(DynamicPPL.LogDensityProblems.logdensity)}, x...
) = false
should_run_benchmark(
    ::Val{:enzyme}, ::Base.Fix1{<:typeof(DynamicPPL.LogDensityProblems.logdensity)}, x...
) = false

@inline g(x, a, ::Val{N}) where {N} = N > 0 ? g(x * a, a, Val(N-1)) : x
Expand Down
86 changes: 12 additions & 74 deletions test/integration_testing/turing.jl
Original file line number Diff line number Diff line change
@@ -1,13 +1,4 @@
using Turing

using ReverseDiff
# using CSV, DataFrames, ReverseDiff
# turing_bench_results = DataFrame(
# :name => String[],
# :primal => [],
# :gradient => [],
# :reversediff => [],
# )
using Distributions, DynamicPPL

@model function simple_model()
y ~ Normal()
Expand All @@ -27,7 +18,6 @@ end
# Model exercising broadcast tilde syntax (`.~`): truncated-normal priors on
# `μ` and `σ`, with every element of the data `x` drawn i.i.d. LogNormal(μ, σ).
@model broadcast_demo(x) = begin
    μ ~ truncated(Normal(1, 2), 0.1, 10)
    σ ~ truncated(Normal(1, 2), 0.1, 10)

    x .~ LogNormal(μ, σ)
end

Expand Down Expand Up @@ -68,7 +58,7 @@ end
θ ~ filldist(Dirichlet(α), D)

log_product = log.(β * θ)
Turing.@addlogprob! sum(log_product[CartesianIndex.(w, doc)])
DynamicPPL.@addlogprob! sum(log_product[CartesianIndex.(w, doc)])
# Above is equivalent to below
#product = β * θ
#dist = [Categorical(product[:,i]) for i in 1:D]
Expand All @@ -88,12 +78,12 @@ end
make_large_model()

"""
    build_turing_problem(rng, model, example=nothing)

Construct a log-density test problem from `model` using DynamicPPL (the
pre-commit Turing-based lines left over in the diff are dropped here).

If `example` is provided, it seeds the `SimpleVarInfo`; otherwise the
var-info is derived from `model` itself. Returns `(test_function, x0)`,
where `test_function` maps a linked parameter vector to its log density
and `x0` is a random point of matching dimension drawn from `rng`.
"""
function build_turing_problem(rng, model, example=nothing)
    ctx = DynamicPPL.DefaultContext()
    vi = DynamicPPL.SimpleVarInfo(example === nothing ? model : example)
    # Work in the linked (unconstrained) parametrisation.
    vi_linked = DynamicPPL.link(vi, model)
    ldp = DynamicPPL.LogDensityFunction(vi_linked, model, ctx)
    test_function = Base.Fix1(DynamicPPL.LogDensityProblems.logdensity, ldp)
    d = DynamicPPL.LogDensityProblems.dimension(ldp)
    return test_function, randn(rng, d)
end

Expand All @@ -118,64 +108,12 @@ end
# ), doesn't currently work with SimpleVarInfo
],
Any[
(false, "demo_$n", m, Turing.DynamicPPL.TestUtils.rand_prior_true(m)) for
(n, m) in enumerate(Turing.DynamicPPL.TestUtils.DEMO_MODELS)
(false, "demo_$n", m, DynamicPPL.TestUtils.rand_prior_true(m)) for
(n, m) in enumerate(DynamicPPL.TestUtils.DEMO_MODELS)
],
)
@info name
rng = sr(123)
f, x = build_turing_problem(rng, model, ex)
test_rule(sr(123456), f, x; interface_only=true, is_primitive=false, debug_mode=true)

# rule = build_rrule(interp, _typeof((f, x)))
# codualed_args = map(zero_codual, (f, x))
# TestUtils.to_benchmark(rule, codualed_args...)

# primal = @benchmark $f($x)
# gradient = @benchmark(TestUtils.to_benchmark($rule, $codualed_args...))

# println("primal")
# display(primal)
# println()

# println("gradient")
# display(gradient)
# println()

# try
# tape = ReverseDiff.GradientTape(f, x);
# ReverseDiff.gradient!(tape, x);
# result = zeros(size(x));
# ReverseDiff.gradient!(result, tape, x)

# revdiff = @benchmark ReverseDiff.gradient!($result, $tape, $x)
# println("ReverseDiff")
# display(revdiff)
# println()
# @show time(revdiff) / time(primal)
# catch
# display("revdiff failed")
# end

# @show time(gradient) / time(primal)

# @profview run_many_times(10_000, TestUtils.to_benchmark, rule, codualed_args...)

# Profile.clear()
# @profile run_many_times(10_000, TestUtils.to_benchmark, rule, codualed_args...)
# pprof()

# push!(turing_bench_results, (name, primal, gradient, revdiff))
f, x = build_turing_problem(sr(123), model, ex)
test_rule(sr(123456), f, x; interface_only=true, is_primitive=false)
end
end

# function process_turing_bench_results(df::DataFrame)
# out_df = DataFrame(
# :name => df.name,
# :primal => map(time, df.primal),
# :gradient => map(time, df.gradient),
# :reversediff => map(time, df.reversediff),
# )
# CSV.write("turing_benchmarks.csv", out_df)
# end
# process_turing_bench_results(turing_bench_results)

2 comments on commit f4e3ca2

@willtebbutt
Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

@JuliaRegistrator
Copy link

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Registration pull request created: JuliaRegistries/General/116081

Tip: Release Notes

Did you know you can add release notes too? Just add markdown formatted text underneath the comment after the text
"Release notes:" and it will be added to the registry PR, and if TagBot is installed it will also be added to the
release that TagBot creates. i.e.

@JuliaRegistrator register

Release notes:

## Breaking changes

- blah

To add them here just re-invoke and the PR will be updated.

Tagging

After the above pull request is merged, it is recommended that a tag is created on this repository for the registered package version.

This will be done automatically if the Julia TagBot GitHub Action is installed, or can be done manually through the github interface, or via:

git tag -a v0.4.2 -m "<description of version>" f4e3ca2ac9e7c8fdd3d881f5ae6df101d875ea76
git push origin v0.4.2

Please sign in to comment.