From 302a4999a65abf4abb63cb50a2538b9ff49c2de1 Mon Sep 17 00:00:00 2001
From: Penelope Yong
Date: Wed, 25 Sep 2024 12:48:54 +0100
Subject: [PATCH] Add API docs

---
 docs/Project.toml                |   6 ++
 docs/README.md                   |   5 -
 docs/make.jl                     |  37 +++++++
 docs/src/api.md                  | 168 +++++++++++++++++++++++++++++++
 docs/src/api/Inference.md        |   6 ++
 docs/src/api/Optimisation.md     |   6 ++
 docs/src/index.md                |   5 +
 docs/src/library/advancedhmc.md  |  25 -----
 docs/src/library/api.md          |  45 ---------
 docs/src/library/bijectors.md    |  25 -----
 src/Turing.jl                    |   4 +-
 src/mcmc/Inference.jl            |   6 +-
 src/mcmc/emcee.jl                |  11 ++
 src/mcmc/particle_mcmc.jl        |   5 +
 src/optimisation/Optimisation.jl |  14 ++-
 src/stdlib/distributions.jl      |   4 +-
 16 files changed, 266 insertions(+), 106 deletions(-)
 create mode 100644 docs/Project.toml
 delete mode 100644 docs/README.md
 create mode 100644 docs/make.jl
 create mode 100644 docs/src/api.md
 create mode 100644 docs/src/api/Inference.md
 create mode 100644 docs/src/api/Optimisation.md
 create mode 100644 docs/src/index.md
 delete mode 100644 docs/src/library/advancedhmc.md
 delete mode 100644 docs/src/library/api.md
 delete mode 100644 docs/src/library/bijectors.md

diff --git a/docs/Project.toml b/docs/Project.toml
new file mode 100644
index 000000000..02c0d03de
--- /dev/null
+++ b/docs/Project.toml
@@ -0,0 +1,6 @@
+[deps]
+Bijectors = "76274a88-744f-5084-9051-94815aaf08c4"
+Distributions = "31c24e10-a181-5473-b8eb-7969acd0382f"
+Documenter = "e30172f5-a6a5-5a46-863b-614d45cd2de4"
+DocumenterInterLinks = "d12716ef-a0f6-4df4-a9f1-a5a34e75c656"
+Turing = "fce5fe82-541a-59a6-adf8-730c64b5f9a0"
diff --git a/docs/README.md b/docs/README.md
deleted file mode 100644
index 7a1ad9119..000000000
--- a/docs/README.md
+++ /dev/null
@@ -1,5 +0,0 @@
-Turing's documentation in this directory is in markdown format.
-
-If you want to build the doc locally, please refer to the [README](https://github.com/TuringLang/turinglang.github.io) file in [turinglang.github.io](https://github.com/TuringLang/turinglang.github.io).
-
-Please also visit [this repo](https://github.com/TuringLang/TuringTutorials/tree/master/tutorials) for the docs.
diff --git a/docs/make.jl b/docs/make.jl
new file mode 100644
index 000000000..ebde41521
--- /dev/null
+++ b/docs/make.jl
@@ -0,0 +1,37 @@
+using Documenter
+using Turing
+# Need to import Distributions and Bijectors to generate docs for functions
+# from those packages.
+using Distributions
+using Bijectors
+
+using DocumenterInterLinks
+
+links = InterLinks(
+    "DynamicPPL" => "https://turinglang.org/DynamicPPL.jl/stable/objects.inv",
+    "AbstractPPL" => "https://turinglang.org/AbstractPPL.jl/dev/objects.inv",
+    "ADTypes" => "https://sciml.github.io/ADTypes.jl/stable/objects.inv",
+)
+
+# Doctest setup
+DocMeta.setdocmeta!(Turing, :DocTestSetup, :(using Turing); recursive=true)
+
+makedocs(;
+    sitename="Turing",
+    modules=[Turing, Distributions, Bijectors],
+    pages=[
+        "Home" => "index.md",
+        "API" => "api.md",
+        "Submodule APIs" => [
+            "Inference" => "api/Inference.md",
+            "Optimisation" => "api/Optimisation.md",
+        ]
+    ],
+    checkdocs=:exports,
+    # checkdocs_ignored_modules=[Turing, Distributions, DynamicPPL, AbstractPPL, Bijectors],
+    doctest=false,
+    warnonly=true,
+    plugins=[links],
+)
+
+deploydocs(; repo="github.com/TuringLang/Turing.jl.git", push_preview=true)
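+
+# NOTE: an illustrative sketch (not part of the automated build) of one way to build
+# these docs locally, assuming the commands are run from the repository root and use
+# the environment defined in docs/Project.toml:
+#
+#     using Pkg
+#     Pkg.activate("docs")
+#     Pkg.develop(path=".")   # use the local Turing.jl checkout
+#     Pkg.instantiate()
+#     include("docs/make.jl")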
diff --git a/docs/src/api.md b/docs/src/api.md
new file mode 100644
index 000000000..cedc35bb6
--- /dev/null
+++ b/docs/src/api.md
@@ -0,0 +1,168 @@
+# API
+## Module-wide re-exports
+
+Turing.jl directly re-exports the entire public API of the following packages:
+
+  - [Distributions.jl](https://juliastats.org/Distributions.jl)
+  - [MCMCChains.jl](https://turinglang.org/MCMCChains.jl)
+  - [AbstractMCMC.jl](https://turinglang.org/AbstractMCMC.jl)
+  - [Bijectors.jl](https://turinglang.org/Bijectors.jl)
+  - [Libtask.jl](https://github.com/TuringLang/Libtask.jl)
+
+Please see the individual packages for their documentation.
+
+## Individual exports and re-exports
+
+**All** of the following symbols are exported unqualified by Turing, even though the documentation suggests that many of them are qualified.
+That means, for example, you can just write
+
+```julia
+using Turing
+
+@model function my_model(...)
+
+sample(my_model(), Prior(), 100)
+```
+
+instead of
+
+```julia
+sample(my_model(), Turing.Inference.Prior(), 100)
+```
+
+even though [`Prior()`](@ref) is actually defined in the `Turing.Inference` module.
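+
+As a further illustration (the model and data below are made up for this example), the following runs with nothing but `using Turing`, even though it relies on symbols originally defined in Distributions.jl (`Beta`, `Bernoulli`), DynamicPPL (`@model`), and `Turing.Inference` (`NUTS`):
+
+```julia
+using Turing
+
+# A toy coin-flip model; every name used here is available unqualified.
+@model function coinflip(y)
+    p ~ Beta(1, 1)
+    for i in eachindex(y)
+        y[i] ~ Bernoulli(p)
+    end
+end
+
+chain = sample(coinflip([1, 0, 1, 1, 0]), NUTS(), 1000)
+```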
+
+### Modelling
+
+| Exported symbol | Documentation | Description |
+| --- | --- | --- |
+| `@model` | [`DynamicPPL.@model`](@extref) | Define a probabilistic model |
+| `@varname` | [`AbstractPPL.@varname`](@extref) | Generate a `VarName` from a Julia expression |
+| `@submodel` | [`DynamicPPL.@submodel`](@extref) | Define a submodel |
+
+### Inference
+
+| Exported symbol | Documentation | Description |
+| --- | --- | --- |
+| `sample` | [`StatsBase.sample`](https://turinglang.org/AbstractMCMC.jl/stable/api/#Sampling-a-single-chain) | Sample from a model |
+
+### Samplers
+
+| Exported symbol | Documentation | Description |
+| --- | --- | --- |
+| `Prior` | [`Turing.Inference.Prior`](@ref) | Sample from the prior distribution |
+| `MH` | [`Turing.Inference.MH`](@ref) | Metropolis–Hastings |
+| `Emcee` | [`Turing.Inference.Emcee`](@ref) | Affine-invariant ensemble sampler |
+| `ESS` | [`Turing.Inference.ESS`](@ref) | Elliptical slice sampling |
+| `Gibbs` | [`Turing.Inference.Gibbs`](@ref) | Gibbs sampling |
+| `GibbsConditional` | [`Turing.Inference.GibbsConditional`](@ref) | A "pseudo-sampler" to provide analytical conditionals to `Gibbs` |
+| `HMC` | [`Turing.Inference.HMC`](@ref) | Hamiltonian Monte Carlo |
+| `SGLD` | [`Turing.Inference.SGLD`](@ref) | Stochastic gradient Langevin dynamics |
+| `SGHMC` | [`Turing.Inference.SGHMC`](@ref) | Stochastic gradient Hamiltonian Monte Carlo |
+| `HMCDA` | [`Turing.Inference.HMCDA`](@ref) | Hamiltonian Monte Carlo with dual averaging |
+| `NUTS` | [`Turing.Inference.NUTS`](@ref) | No-U-Turn Sampler |
+| `PolynomialStepsize` | [`Turing.Inference.PolynomialStepsize`](@ref) | A function to generate a polynomially decaying step size |
+| `IS` | [`Turing.Inference.IS`](@ref) | Importance sampling |
+| `SMC` | [`Turing.Inference.SMC`](@ref) | Sequential Monte Carlo |
+| `PG` | [`Turing.Inference.PG`](@ref) | Particle Gibbs |
+| `CSMC` | [`Turing.Inference.CSMC`](@ref) | The same as PG |
+| `externalsampler` | [`Turing.Inference.externalsampler`](@ref) | Wrap an external sampler for use in Turing |
+
+### Variational inference
+
+These functions are not (yet) formally documented.
+Please see the [variational inference tutorial](https://turinglang.org/docs/tutorials/09-variational-inference/) for a walkthrough on how to use these.
+
+TODO: Generate docs for AdvancedVI 0.2, update the InterLinks object.inv, and update the table below to include `@extref`.
+
+| Exported symbol | Documentation | Description |
+| --- | --- | --- |
+| `vi` | `AdvancedVI.vi` | Perform variational inference |
+| `ADVI` | `AdvancedVI.ADVI` | Construct an instance of a VI algorithm |
+
+### Automatic differentiation types
+
+These are used to specify the automatic differentiation backend to use.
+
+| Exported symbol | Documentation | Description |
+| --- | --- | --- |
+| `AutoForwardDiff` | [`ADTypes.AutoForwardDiff`](@extref) | ForwardDiff.jl backend |
+| `AutoReverseDiff` | [`ADTypes.AutoReverseDiff`](@extref) | ReverseDiff.jl backend |
+| `AutoZygote` | [`ADTypes.AutoZygote`](@extref) | Zygote.jl backend |
+| `AutoTracker` | [`ADTypes.AutoTracker`](@extref) | Tracker.jl backend |
+| `AutoTapir` | [`ADTypes.AutoTapir`](@extref) | Tapir.jl backend, only for ADTypes >= 1.0 |
+
+### Debugging
+
+```@docs
+setprogress!
+```
+
+### Distributions
+
+These distributions are defined in Turing.jl, but not in Distributions.jl.
+
+```@docs
+Flat
+FlatPos
+BinomialLogit
+OrderedLogistic
+LogPoisson
+```
+
+`BernoulliLogit` is part of Distributions.jl since version 0.25.77.
+If you are using an older version of Distributions where this isn't defined, Turing will export the same distribution.
+
+```@docs
+Distributions.BernoulliLogit
+```
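+
+As a brief, illustrative sketch (the model and data are invented for this page), these distributions can be used inside a model just like any distribution from Distributions.jl:
+
+```julia
+using Turing
+
+@model function demo(y)
+    μ ~ Flat()        # improper flat prior over the whole real line
+    σ ~ FlatPos(0.0)  # improper flat prior on (0, ∞)
+    for i in eachindex(y)
+        y[i] ~ Normal(μ, σ)
+    end
+end
+
+chain = sample(demo(randn(10)), NUTS(), 500)
+```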
+
+### Tools to work with distributions
+
+TODO: DistributionsAD needs docs
+
+| Exported symbol | Documentation | Description |
+| --- | --- | --- |
+| `filldist` | `DistributionsAD.filldist` | Create a product distribution from a distribution and integers |
+| `arraydist` | `DistributionsAD.arraydist` | Create a product distribution from an array of distributions |
+| `NamedDist` | [`DynamicPPL.NamedDist`](@extref) | A distribution that carries the name of the variable |
+
+### Predictions
+
+```@docs
+predict
+```
+
+### Re-exports from DynamicPPL / AbstractPPL
+
+| Exported symbol | Documentation | Description |
+| --- | --- | --- |
+| `pointwise_loglikelihoods` | [`DynamicPPL.pointwise_loglikelihoods`](@extref) | Compute log likelihoods for each sample in a chain |
+| `generated_quantities` | [`DynamicPPL.generated_quantities`](@extref) | Calculate additional quantities defined in a model |
+| `logprior` | [`DynamicPPL.logprior`](@extref) | Compute log prior probability |
+| `logjoint` | [`DynamicPPL.logjoint`](@extref) | Compute log joint probability |
+| `LogDensityFunction` | [`DynamicPPL.LogDensityFunction`](@extref) | Wrap a Turing model to satisfy the LogDensityProblems.jl interface |
+| `condition` | [`AbstractPPL.condition`](@extref) | Condition a model on data |
+| `decondition` | [`AbstractPPL.decondition`](@extref) | Remove conditioning on data |
+| `conditioned` | [`DynamicPPL.conditioned`](@extref) | Return the conditioned values of a model |
+| `fix` | [`DynamicPPL.fix`](@extref) | Fix the value of a variable |
+| `unfix` | [`DynamicPPL.unfix`](@extref) | Unfix the value of a variable |
+| `OrderedDict` | [`OrderedCollections.OrderedDict`](https://juliacollections.github.io/OrderedCollections.jl/dev/ordered_containers/#OrderedDicts) | An ordered dictionary |
+
+### Extra re-exports from Bijectors
+
+Note that Bijectors itself does not export `ordered`.
+
+```@docs
+Bijectors.ordered
+```
+
+### Point estimates
+
+| Exported symbol | Documentation | Description |
+| --- | --- | --- |
+| `maximum_a_posteriori` | [`Turing.Optimisation.maximum_a_posteriori`](@ref) | Find a MAP estimate for a model |
+| `maximum_likelihood` | [`Turing.Optimisation.maximum_likelihood`](@ref) | Find an MLE estimate for a model |
+| `MAP` | [`Turing.Optimisation.MAP`](@ref) | Type to use with Optim.jl for MAP estimation |
+| `MLE` | [`Turing.Optimisation.MLE`](@ref) | Type to use with Optim.jl for MLE estimation |
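+
+As an illustrative sketch (reusing a toy model invented for this page), the two convenience functions are called directly on a model; the `MAP` and `MLE` types are only needed when optimising through the Optim.jl interface:
+
+```julia
+using Turing
+
+@model function coinflip(y)
+    p ~ Beta(1, 1)
+    for i in eachindex(y)
+        y[i] ~ Bernoulli(p)
+    end
+end
+
+model = coinflip([1, 0, 1, 1, 0])
+mle_estimate = maximum_likelihood(model)
+map_estimate = maximum_a_posteriori(model)
+```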
+
diff --git a/docs/src/api/Inference.md b/docs/src/api/Inference.md
new file mode 100644
index 000000000..7c1bbb430
--- /dev/null
+++ b/docs/src/api/Inference.md
@@ -0,0 +1,6 @@
+# API: `Turing.Inference`
+
+```@autodocs
+Modules = [Turing.Inference]
+Order = [:type, :function]
+```
diff --git a/docs/src/api/Optimisation.md b/docs/src/api/Optimisation.md
new file mode 100644
index 000000000..91e7b1ede
--- /dev/null
+++ b/docs/src/api/Optimisation.md
@@ -0,0 +1,6 @@
+# API: `Turing.Optimisation`
+
+```@autodocs
+Modules = [Turing.Optimisation]
+Order = [:type, :function]
+```
diff --git a/docs/src/index.md b/docs/src/index.md
new file mode 100644
index 000000000..93e7e3b95
--- /dev/null
+++ b/docs/src/index.md
@@ -0,0 +1,5 @@
+# Turing.jl
+
+This site contains the API documentation for the identifiers exported by Turing.jl.
+
+If you are looking for usage examples and guides, please visit [https://turinglang.org/docs](https://turinglang.org/docs).
diff --git a/docs/src/library/advancedhmc.md b/docs/src/library/advancedhmc.md
deleted file mode 100644
index 84f712f4d..000000000
--- a/docs/src/library/advancedhmc.md
+++ /dev/null
@@ -1,25 +0,0 @@
----
-title: AdvancedHMC
-permalink: /docs/library/advancedhmc/
-toc: true
----
-
-## Index
-
-```@index
-Modules = [AdvancedHMC]
-```
-
-## Functions
-
-```@autodocs
-Modules = [AdvancedHMC]
-Order = [:function]
-```
-
-## Types
-
-```@autodocs
-Modules = [AdvancedHMC]
-Order = [:type]
-```
diff --git a/docs/src/library/api.md b/docs/src/library/api.md
deleted file mode 100644
index c598820b7..000000000
--- a/docs/src/library/api.md
+++ /dev/null
@@ -1,45 +0,0 @@
----
-title: API
-permalink: /docs/library/
-toc: true
----
-
-```@meta
-CurrentModule = Turing
-```
-
-## Index
-
-```@index
-Modules = [Turing, Turing.Essential, Turing.Inference, Libtask]
-```
-
-## Modelling
-
-```@docs
-@model
-```
-
-## Samplers
-
-```@docs
-Sampler
-Gibbs
-HMC
-HMCDA
-IS
-MH
-NUTS
-PG
-SMC
-```
-
-## Distributions
-
-```@docs
-Flat
-FlatPos
-BinomialLogit
-VecBinomialLogit
-OrderedLogistic
-```
diff --git a/docs/src/library/bijectors.md b/docs/src/library/bijectors.md
deleted file mode 100644
index 471da45fe..000000000
--- a/docs/src/library/bijectors.md
+++ /dev/null
@@ -1,25 +0,0 @@
----
-title: Bijectors
-permalink: /docs/library/bijectors/
-toc: true
----
-
-## Index
-
-```@index
-Modules = [Bijectors]
-```
-
-## Functions
-
-```@autodocs
-Modules = [Bijectors]
-Order = [:function]
-```
-
-## Types
-
-```@autodocs
-Modules = [Bijectors]
-Order = [:type]
-```
diff --git a/src/Turing.jl b/src/Turing.jl
index 8dfb8df28..873c270fa 100644
--- a/src/Turing.jl
+++ b/src/Turing.jl
@@ -100,8 +100,8 @@ export @model, # modelling
     vi, # variational inference
     ADVI,
     sample, # inference
-    @logprob_str,
-    @prob_str,
+    @logprob_str, # TODO: Remove, see https://github.com/TuringLang/DynamicPPL.jl/issues/356
+    @prob_str, # TODO: Remove, see https://github.com/TuringLang/DynamicPPL.jl/issues/356
     externalsampler,
     AutoForwardDiff, # ADTypes
     AutoReverseDiff,
diff --git a/src/mcmc/Inference.jl b/src/mcmc/Inference.jl
index b7bdf206b..7c00d3d59 100644
--- a/src/mcmc/Inference.jl
+++ b/src/mcmc/Inference.jl
@@ -168,7 +168,11 @@ function DynamicPPL.unflatten(vi::TypedVarInfo, θ::NamedTuple)
 end
 DynamicPPL.unflatten(vi::SimpleVarInfo, θ::NamedTuple) = SimpleVarInfo(θ, vi.logp, vi.transformation)

-# Algorithm for sampling from the prior
+"""
+    Prior()
+
+Algorithm for sampling from the prior.
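+
+# Examples
+
+An illustrative sketch, where `my_model()` stands in for any Turing model:
+
+```julia
+chain = sample(my_model(), Prior(), 1_000)
+```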
+"""
 struct Prior <: InferenceAlgorithm end

 function AbstractMCMC.step(
diff --git a/src/mcmc/emcee.jl b/src/mcmc/emcee.jl
index 4ebe49ea5..ebdfa041d 100644
--- a/src/mcmc/emcee.jl
+++ b/src/mcmc/emcee.jl
@@ -2,6 +2,17 @@
 ### Sampler states
 ###

+"""
+    Emcee(n_walkers::Int, stretch_length=2.0)
+
+Affine-invariant ensemble sampling algorithm.
+
+# Reference
+
+Foreman-Mackey, D., Hogg, D. W., Lang, D., & Goodman, J. (2013).
+emcee: The MCMC Hammer. Publications of the Astronomical Society of the
+Pacific, 125 (925), 306. https://doi.org/10.1086/670067
+"""
 struct Emcee{space,E<:AMH.Ensemble} <: InferenceAlgorithm
     ensemble::E
 end
diff --git a/src/mcmc/particle_mcmc.jl b/src/mcmc/particle_mcmc.jl
index 02a53766e..579ebfae2 100644
--- a/src/mcmc/particle_mcmc.jl
+++ b/src/mcmc/particle_mcmc.jl
@@ -220,6 +220,11 @@ function PG(nparticles::Int, space::Tuple)
     return PG(nparticles, AdvancedPS.ResampleWithESSThreshold(), space)
 end

+"""
+    CSMC(...)
+
+Equivalent to [`PG`](@ref).
+"""
 const CSMC = PG # type alias of PG as Conditional SMC

 struct PGTransition{T,F<:AbstractFloat} <: AbstractTransition
diff --git a/src/optimisation/Optimisation.jl b/src/optimisation/Optimisation.jl
index eecfcad22..414561bde 100644
--- a/src/optimisation/Optimisation.jl
+++ b/src/optimisation/Optimisation.jl
@@ -25,10 +25,22 @@ export MAP, MLE
     ModeEstimator

 An abstract type to mark whether mode estimation is to be done with maximum a posteriori
-(MAP) or maximum likelihood estimation (MLE).
+(MAP) or maximum likelihood estimation (MLE). This is only needed for the Optim.jl interface.
 """
 abstract type ModeEstimator end
+
+"""
+    MLE <: ModeEstimator
+
+Concrete type for maximum likelihood estimation. Only used for the Optim.jl interface.
+"""
 struct MLE <: ModeEstimator end
+
+"""
+    MAP <: ModeEstimator
+
+Concrete type for maximum a posteriori estimation. Only used for the Optim.jl interface.
+"""
 struct MAP <: ModeEstimator end

 """
diff --git a/src/stdlib/distributions.jl b/src/stdlib/distributions.jl
index c2b92c29d..568ab3ae3 100644
--- a/src/stdlib/distributions.jl
+++ b/src/stdlib/distributions.jl
@@ -207,12 +207,12 @@ end

 The *Poisson distribution* with logarithmic parameterization of the rate parameter
 describes the number of independent events occurring within a unit time interval, given the
-average rate of occurrence ``exp(logλ)``.
+average rate of occurrence ``\\exp(\\log\\lambda)``.

 The distribution has the probability mass function

 ```math
-P(X = k) = \\frac{e^{k \\cdot logλ}{k!} e^{-e^{logλ}}, \\quad \\text{ for } k = 0,1,2,\\ldots.
+P(X = k) = \\frac{e^{k \\cdot \\log\\lambda}}{k!} e^{-e^{\\log\\lambda}}, \\quad \\text{ for } k = 0,1,2,\\ldots.
 ```

 See also: [`Poisson`](@ref)
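+
+For example, the log-parameterized pmf coincides with that of a standard `Poisson`
+with rate `exp(logλ)`; an illustrative check (up to floating-point rounding):
+
+```julia
+logpdf(LogPoisson(log(3.0)), 4) ≈ logpdf(Poisson(3.0), 4)
+```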