% nested-sampling.bib — biblatex bibliography on nested sampling and Bayesian inference.
@article{ashtonNestedSamplingPhysical2022,
  author = {Ashton, Greg and Bernstein, Noam and Buchner, Johannes and Chen, Xi and Csányi, Gábor and Fowlie, Andrew and Feroz, Farhan and Griffiths, Matthew and Handley, Will and Habeck, Michael and Higson, Edward and Hobson, Michael and Lasenby, Anthony and Parkinson, David and Pártay, Livia B. and Pitkin, Matthew and Schneider, Doris and Speagle, Joshua S. and South, Leah and Veitch, John and Wacker, Philipp and Wales, David J. and Yallup, David},
  title = {Nested Sampling for Physical Scientists},
  date = {2022-05-26},
  journaltitle = {Nature Reviews Methods Primers},
  shortjournal = {Nat Rev Methods Primers},
  volume = {2},
  number = {1},
  pages = {1--22},
  publisher = {Nature Publishing Group},
  doi = {10.1038/s43586-022-00121-x},
  url = {https://www.nature.com/articles/s43586-022-00121-x},
  urldate = {2024-11-23},
  issn = {2662-8449},
  langid = {english},
  abstract = {This Primer examines Skilling’s nested sampling algorithm for Bayesian inference and, more broadly, multidimensional integration. The principles of nested sampling are summarized and recent developments using efficient nested sampling algorithms in high dimensions~surveyed, including methods for sampling from the constrained prior. Different ways of applying nested sampling are outlined, with detailed examples from three scientific fields: cosmology, gravitational-wave astronomy and materials science. Finally, the Primer includes recommendations for best practices and a discussion of potential limitations and optimizations of nested sampling.},
  keywords = {Statistical physics,Statistics},
  file = {/home/jacopo/Zotero/storage/78F2DMGC/Ashton et al. - 2022 - Nested sampling for physical scientists.pdf},
}
@book{billingsleyProbabilityMeasure1995,
  author = {Billingsley, Patrick},
  title = {Probability and Measure},
  date = {1995},
  series = {Wiley Series in Probability and Mathematical Statistics},
  edition = {3},
  publisher = {Wiley},
  location = {New York, NY},
  isbn = {978-0-471-00710-4},
  langid = {english},
  pagetotal = {593},
  file = {/home/jacopo/Zotero/storage/5JS2GV7Y/Billingsley - 1995 - Probability and measure.pdf},
}
@online{buchnerIntuitionPhysicistsInformation2022,
  author = {Buchner, Johannes},
  title = {An Intuition for Physicists: Information Gain from Experiments},
  shorttitle = {An Intuition for Physicists},
  date = {2022-08-26},
  eprint = {2205.00009},
  eprinttype = {arXiv},
  eprintclass = {astro-ph, physics:cond-mat, physics:physics},
  doi = {10.48550/arXiv.2205.00009},
  url = {http://arxiv.org/abs/2205.00009},
  urldate = {2024-05-29},
  version = {3},
  pubstate = {prepublished},
  abstract = {How much one has learned from an experiment is quantifiable by the information gain, also known as the Kullback-Leibler divergence. The narrowing of the posterior parameter distribution \$P(\textbackslash theta|D)\$ compared with the prior parameter distribution \$\textbackslash pi(\textbackslash theta)\$, is quantified in units of bits, as: \$ D\_\{\textbackslash mathrm\{KL\}\}(P|\textbackslash pi)=\textbackslash int\textbackslash log\_\{2\}\textbackslash left(\textbackslash frac\{P(\textbackslash theta|D)\}\{\textbackslash pi(\textbackslash theta)\}\textbackslash right)\textbackslash,P(\textbackslash theta|D)\textbackslash,d\textbackslash theta \$. This research note gives an intuition what one bit of information gain means. It corresponds to a Gaussian shrinking its standard deviation by a factor of three.},
  keywords = {Astrophysics - Instrumentation and Methods for Astrophysics,Condensed Matter - Statistical Mechanics,Physics - Data Analysis Statistics and Probability},
  file = {/home/jacopo/Zotero/storage/8RW7LGYW/Buchner - 2022 - An intuition for physicists information gain from.pdf;/home/jacopo/Zotero/storage/D27ZDLT4/2205.html},
}
@online{buchnerNestedSamplingMethods2021,
  author = {Buchner, Johannes},
  title = {Nested {{Sampling Methods}}},
  date = {2021-07-13},
  eprint = {2101.09675},
  eprinttype = {arXiv},
  eprintclass = {astro-ph, stat},
  doi = {10.48550/arXiv.2101.09675},
  url = {http://arxiv.org/abs/2101.09675},
  urldate = {2021-10-19},
  pubstate = {prepublished},
  abstract = {Nested sampling (NS) computes parameter posterior distributions and makes Bayesian model comparison computationally feasible. Its strengths are the unsupervised navigation of complex, potentially multi-modal posteriors until a well-defined termination point. A systematic literature review of nested sampling algorithms and variants is presented. We focus on complete algorithms, including solutions to likelihood-restricted prior sampling, parallelisation, termination and diagnostics. The relation between number of live points, dimensionality and computational cost is studied for two complete algorithms. A new formulation of NS is presented, which casts the parameter space exploration as a search on a tree. Previously published ways of obtaining robust error estimates and dynamic variations of the number of live points are presented as special cases of this formulation. A new on-line diagnostic test is presented based on previous insertion rank order work. The survey of nested sampling methods concludes with outlooks for future research.},
  keywords = {Astrophysics - Instrumentation and Methods for Astrophysics,Statistics - Computation},
  file = {/home/jacopo/Zotero/storage/PUU2NMMQ/Buchner_2021_Nested Sampling Methods.pdf;/home/jacopo/Zotero/storage/GCVJUHDH/2101.html},
}
@online{chenBayesianPosteriorRepartitioning2022,
  author = {Chen, Xi and Feroz, Farhan and Hobson, Michael},
  title = {Bayesian Posterior Repartitioning for Nested Sampling},
  date = {2022-07-04},
  eprint = {1908.04655},
  eprinttype = {arXiv},
  doi = {10.48550/arXiv.1908.04655},
  url = {http://arxiv.org/abs/1908.04655},
  urldate = {2024-11-27},
  pubstate = {prepublished},
  abstract = {Priors in Bayesian analyses often encode informative domain knowledge that can be useful in making the inference process more efficient. Occasionally, however, priors may be unrepresentative of the parameter values for a given dataset, which can result in inefficient parameter space exploration, or even incorrect inferences, particularly for nested sampling (NS) algorithms. Simply broadening the prior in such cases may be inappropriate or impossible in some applications. Hence our previous solution to this problem, known as posterior repartitioning (PR), redefines the prior and likelihood while keeping their product fixed, so that the posterior inferences and evidence estimates remain unchanged, but the efficiency of the NS process is significantly increased. In its most practical form, PR raises the prior to some power beta, which is introduced as an auxiliary variable that must be determined on a case-by-case basis, usually by lowering beta from unity according to some pre-defined `annealing schedule' until the resulting inferences converge to a consistent solution. Here we present a very simple yet powerful alternative Bayesian approach, in which beta is instead treated as a hyperparameter that is inferred from the data alongside the original parameters of the problem, and then marginalised over to obtain the final inference. We show through numerical examples that this Bayesian PR (BPR) method provides a very robust, self-adapting and computationally efficient `hands-off' solution to the problem of unrepresentative priors in Bayesian inference using NS. Moreover, unlike the original PR method, we show that even for representative priors BPR has a negligible computational overhead relative to standard nesting sampling, which suggests that it should be used as the default in all NS analyses.},
  keywords = {Astrophysics - Instrumentation and Methods for Astrophysics,Computer Science - Neural and Evolutionary Computing,Statistics - Computation},
  file = {/home/jacopo/Zotero/storage/XH4EBV49/Chen et al. - 2022 - Bayesian posterior repartitioning for nested sampl.pdf;/home/jacopo/Zotero/storage/HPVD7B8D/1908.html},
}
@article{chopinPropertiesNestedSampling2010,
  author = {Chopin, Nicolas and Robert, Christian P.},
  title = {Properties of Nested Sampling},
  date = {2010-09-01},
  journaltitle = {Biometrika},
  shortjournal = {Biometrika},
  volume = {97},
  number = {3},
  pages = {741--755},
  doi = {10.1093/biomet/asq021},
  url = {https://doi.org/10.1093/biomet/asq021},
  urldate = {2024-11-25},
  issn = {0006-3444},
  abstract = {Nested sampling is a simulation method for approximating marginal likelihoods. We establish that nested sampling has an approximation error that vanishes at the standard Monte Carlo rate and that this error is asymptotically Gaussian. It is shown that the asymptotic variance of the nested sampling approximation typically grows linearly with the dimension of the parameter. We discuss the applicability and efficiency of nested sampling in realistic problems, and compare it with two current methods for computing marginal likelihood. Finally, we propose an extension that avoids resorting to Markov chain Monte Carlo simulation to obtain the simulated points.},
  file = {/home/jacopo/Zotero/storage/WTVSHENE/Chopin and Robert - 2010 - Properties of nested sampling.pdf;/home/jacopo/Zotero/storage/U8BAXHJE/243485.html},
}
@book{coverElementsInformationTheory2006,
  author = {Cover, Thomas M. and Thomas, Joy A.},
  title = {Elements of {{Information Theory}}},
  date = {2006-07-18},
  edition = {2},
  publisher = {Wiley-Interscience},
  location = {Hoboken, N.J},
  isbn = {978-0-471-24195-9},
  langid = {english},
  pagetotal = {784},
  abstract = {The latest edition of this classic is updated with new problem sets and material The Second Edition of this fundamental textbook maintains the book's tradition of clear, thought-provoking instruction. Readers are provided once again with an instructive mix of mathematics, physics, statistics, and information theory. All the essential topics in information theory are covered in detail, including entropy, data compression, channel capacity, rate distortion, network information theory, and hypothesis testing. The authors provide readers with a solid understanding of the underlying theory and applications. Problem sets and a telegraphic summary at the end of each chapter further assist readers. The historical notes that follow each chapter recap the main points. The Second Edition features: * Chapters reorganized to improve teaching * 200 new problems * New material on source coding, portfolio theory, and feedback capacity * Updated references Now current and enhanced, the Second Edition of Elements of Information Theory remains the ideal textbook for upper-level undergraduate and graduate courses in electrical engineering, statistics, and telecommunications.},
}
@online{handleyPolyChordNestedSampling2015,
  author = {Handley, W. J. and Hobson, M. P. and Lasenby, A. N.},
  title = {{{PolyChord}}: Nested Sampling for Cosmology},
  shorttitle = {{{PolyChord}}},
  date = {2015-03-28},
  eprint = {1502.01856},
  eprinttype = {arXiv},
  doi = {10.48550/arXiv.1502.01856},
  url = {http://arxiv.org/abs/1502.01856},
  urldate = {2024-11-22},
  pubstate = {prepublished},
  abstract = {PolyChord is a novel nested sampling algorithm tailored for high dimensional parameter spaces. In addition, it can fully exploit a hierarchy of parameter speeds such as is found in CosmoMC and CAMB. It utilises slice sampling at each iteration to sample within the hard likelihood constraint of nested sampling. It can identify and evolve separate modes of a posterior semi-independently and is parallelised using openMPI. PolyChord is available for download at: http://ccpforge.cse.rl.ac.uk/gf/project/polychord/},
  keywords = {Astrophysics - Cosmology and Nongalactic Astrophysics,Astrophysics - Instrumentation and Methods for Astrophysics},
  file = {/home/jacopo/Zotero/storage/SHRKM2XY/Handley et al. - 2015 - PolyChord nested sampling for cosmology.pdf;/home/jacopo/Zotero/storage/NEKBUEIP/1502.html},
}
@article{higsonDynamicNestedSampling2019,
  author = {Higson, Edward and Handley, Will and Hobson, Michael and Lasenby, Anthony},
  title = {Dynamic Nested Sampling: An Improved Algorithm for Parameter Estimation and Evidence Calculation},
  shorttitle = {Dynamic Nested Sampling},
  date = {2019-09-01},
  journaltitle = {Statistics and Computing},
  shortjournal = {Stat Comput},
  volume = {29},
  number = {5},
  pages = {891--913},
  doi = {10.1007/s11222-018-9844-0},
  url = {https://doi.org/10.1007/s11222-018-9844-0},
  urldate = {2024-11-25},
  issn = {1573-1375},
  langid = {english},
  abstract = {We introduce dynamic nested sampling: a generalisation of the nested sampling algorithm in which the number of “live points” varies to allocate samples more efficiently. In empirical tests the new method significantly improves calculation accuracy compared to standard nested sampling with the same number of samples; this increase in accuracy is equivalent to speeding up the computation by factors of up to \$\$\textbackslash sim 72\$\$for parameter estimation and \$\$\textbackslash sim 7\$\$for evidence calculations. We also show that the accuracy of both parameter estimation and evidence calculations can be improved simultaneously. In addition, unlike in standard nested sampling, more accurate results can be obtained by continuing the calculation for longer. Popular standard nested sampling implementations can be easily adapted to perform dynamic nested sampling, and several dynamic nested sampling software packages are now publicly available.},
  keywords = {Artificial Intelligence,Bayesian computation,Bayesian evidence,Nested sampling,Parameter estimation},
  file = {/home/jacopo/Zotero/storage/IZTV2S4F/Higson et al. - 2019 - Dynamic nested sampling an improved algorithm for.pdf},
}
@online{knuthTwoNotesNotation1992,
  author = {Knuth, Donald E.},
  title = {Two Notes on Notation},
  date = {1992-04-30},
  eprint = {math/9205211},
  eprinttype = {arXiv},
  doi = {10.48550/arXiv.math/9205211},
  url = {http://arxiv.org/abs/math/9205211},
  urldate = {2020-03-04},
  pubstate = {prepublished},
  abstract = {The author advocates two specific mathematical notations from his popular course and joint textbook, "Concrete Mathematics". The first of these, extending an idea of Iverson, is the notation "[P]" for the function which is 1 when the Boolean condition P is true and 0 otherwise. This notation can encourage and clarify the use of characteristic functions and Kronecker deltas in sums and integrals. The second notation puts Stirling numbers on the same footing as binomial coefficients. Since binomial coefficients are written on two lines in parentheses and read "n choose k", Stirling numbers of the first kind should be written on two lines in brackets and read "n cycle k", while Stirling numbers of the second kind should be written in braces and read "n subset k". (I might say "n partition k".) The written form was first suggested by Imanuel Marx. The virtues of this notation are that Stirling partition numbers frequently appear in combinatorics, and that it more clearly presents functional relations similar to those satisfied by binomial coefficients.},
  keywords = {Mathematics - History and Overview},
  file = {/home/jacopo/Zotero/storage/3UUDI7X7/Knuth - 1992 - Two notes on notation.pdf;/home/jacopo/Zotero/storage/IAG3XHLT/9205211.html},
}
@online{petrosyanSuperNestAcceleratedNested2022,
  author = {Petrosyan, Aleksandr and Handley, William James},
  title = {{{SuperNest}}: Accelerated Nested Sampling Applied to Astrophysics and Cosmology},
  shorttitle = {{{SuperNest}}},
  date = {2022-12-04},
  eprint = {2212.01760},
  eprinttype = {arXiv},
  eprintclass = {astro-ph, physics:physics},
  doi = {10.48550/arXiv.2212.01760},
  url = {http://arxiv.org/abs/2212.01760},
  urldate = {2022-12-06},
  pubstate = {prepublished},
  abstract = {We present a method for improving the performance of nested sampling as well as its accuracy. Building on previous work by Chen et al., we show that posterior repartitioning may be used to reduce the amount of time nested sampling spends in compressing from prior to posterior if a suitable ``proposal'' distribution is supplied. We showcase this on a cosmological example with a Gaussian posterior, and release the code as an LGPL licensed, extensible Python package https://gitlab.com/a-p-petrosyan/sspr.},
  keywords = {Astrophysics - Cosmology and Nongalactic Astrophysics,Physics - Computational Physics},
  file = {/home/jacopo/Zotero/storage/YESGL27C/Petrosyan and Handley - 2022 - SuperNest accelerated nested sampling applied to .pdf;/home/jacopo/Zotero/storage/938JT4X9/2212.html},
}
@article{romero-shawWhenModelsFail2022,
  author = {Romero-Shaw, Isobel M. and Thrane, Eric and Lasky, Paul D.},
  title = {When Models Fail: An Introduction to Posterior Predictive Checks and Model Misspecification in Gravitational-Wave Astronomy},
  shorttitle = {When Models Fail},
  date = {2022},
  journaltitle = {Publications of the Astronomical Society of Australia},
  shortjournal = {Publ. Astron. Soc. Aust.},
  volume = {39},
  pages = {e025},
  eprint = {2202.05479},
  eprinttype = {arXiv},
  eprintclass = {astro-ph, physics:gr-qc},
  doi = {10.1017/pasa.2022.24},
  url = {http://arxiv.org/abs/2202.05479},
  urldate = {2024-02-24},
  issn = {1323-3580, 1448-6083},
  abstract = {Bayesian inference is a powerful tool in gravitational-wave astronomy. It enables us to deduce the properties of merging compact-object binaries and to determine how these mergers are distributed as a population according to mass, spin, and redshift. As key results are increasingly derived using Bayesian inference, there is increasing scrutiny on Bayesian methods. In this review, we discuss the phenomenon of \textbackslash textit\{model misspecification\}, in which results obtained with Bayesian inference are misleading because of deficiencies in the assumed model(s). Such deficiencies can impede our inferences of the true parameters describing physical systems. They can also reduce our ability to distinguish the "best fitting" model: it can be misleading to say that Model\textasciitilde A is preferred over Model\textasciitilde B if both models are manifestly poor descriptions of reality. Broadly speaking, there are two ways in which models fail: models that fail to adequately describe the data (either the signal or the noise) have misspecified likelihoods. Population models -- designed, for example, to describe the distribution of black hole masses -- may fail to adequately describe the true population due to a misspecified prior. We recommend tests and checks that are useful for spotting misspecified models using examples inspired by gravitational-wave astronomy. We include companion python notebooks to illustrate essential concepts.},
  keywords = {Astrophysics - Instrumentation and Methods for Astrophysics,General Relativity and Quantum Cosmology},
  file = {/home/jacopo/Zotero/storage/L3ZGVXBH/Romero-Shaw et al. - 2022 - When models fail an introduction to posterior pre.pdf;/home/jacopo/Zotero/storage/7VW2EV8L/2202.html},
}
@online{rouletInferringBinaryProperties2024,
  author = {Roulet, Javier and Venumadhav, Tejaswi},
  title = {Inferring {{Binary Properties}} from {{Gravitational Wave Signals}}},
  date = {2024-02-17},
  eprint = {2402.11439},
  eprinttype = {arXiv},
  eprintclass = {astro-ph, physics:gr-qc},
  doi = {10.1146/annurev-nucl-121423-100725},
  url = {http://arxiv.org/abs/2402.11439},
  urldate = {2024-02-20},
  pubstate = {prepublished},
  abstract = {This review provides a conceptual and technical survey of methods for parameter estimation of gravitational wave signals in ground-based interferometers such as LIGO and Virgo. We introduce the framework of Bayesian inference and provide an overview of models for the generation and detection of gravitational waves from compact binary mergers, focusing on the essential features that are observable in the signals. Within the traditional likelihood-based paradigm, we describe various approaches for enhancing the efficiency and robustness of parameter inference. This includes techniques for accelerating likelihood evaluations, such as heterodyne/relative binning, reduced-order quadrature, multibanding and interpolation. We also cover methods to simplify the analysis to improve convergence, via reparametrization, importance sampling and marginalization. We end with a discussion of recent developments in the application of likelihood-free (simulation-based) inference methods to gravitational wave data analysis.},
  keywords = {Astrophysics - High Energy Astrophysical Phenomena,General Relativity and Quantum Cosmology},
  file = {/home/jacopo/Zotero/storage/9HIFFZMH/Roulet and Venumadhav - 2024 - Inferring Binary Properties from Gravitational Wav.pdf;/home/jacopo/Zotero/storage/9QZTGYEB/2402.html},
}
@book{siviaDataAnalysisBayesian2006,
  author = {Sivia, Devinderjit and Skilling, John},
  title = {Data {{Analysis}}: {{A Bayesian Tutorial}}},
  shorttitle = {Data {{Analysis}}},
  date = {2006-06},
  edition = {2},
  publisher = {Oxford University Press},
  eprint = {lYMSDAAAQBAJ},
  eprinttype = {googlebooks},
  isbn = {978-0-19-856831-5},
  langid = {english},
  pagetotal = {259},
  abstract = {Statistics lectures have been a source of much bewilderment and frustration for generations of students. This book attempts to remedy the situation by expounding a logical and unified approach to the whole subject of data analysis. This text is intended as a tutorial guide for senior undergraduates and research students in science and engineering. After explaining the basic principles of Bayesian probability theory, their use is illustrated with a variety of examples ranging from elementary parameter estimation to image processing. Other topics covered include reliability analysis, multivariate optimization, least-squares and maximum likelihood, error-propagation, hypothesis testing, maximum entropy and experimental design. The Second Edition of this successful tutorial book contains a new chapter on extensions to the ubiquitous least-squares procedure, allowing for the straightforward handling of outliers and unknown correlated noise, and a cutting-edge contribution from John Skilling on a novel numerical technique for Bayesian computation called 'nested sampling'.},
  keywords = {Mathematics / Applied,Mathematics / Probability & Statistics / Bayesian Analysis,Mathematics / Probability & Statistics / General,Science / Physics / General},
  file = {/home/jacopo/Zotero/storage/FPF4636S/Sivia and Skilling - 2006 - Data Analysis A Bayesian Tutorial.pdf},
}
@inproceedings{skillingNestedSampling2004,
  author = {Skilling, John},
  title = {Nested {{Sampling}}},
  booktitle = {{{AIP Conference Proceedings}}},
  eventtitle = {{{BAYESIAN INFERENCE AND MAXIMUM ENTROPY METHODS IN SCIENCE AND ENGINEERING}}: 24th {{International Workshop}} on {{Bayesian Inference}} and {{Maximum Entropy Methods}} in {{Science}} and {{Engineering}}},
  date = {2004},
  volume = {735},
  pages = {395--405},
  publisher = {AIP},
  location = {Garching (Germany)},
  doi = {10.1063/1.1835238},
  url = {https://pubs.aip.org/aip/acp/article/735/1/395-405/748716},
  urldate = {2024-11-25},
  issn = {0094-243X},
  langid = {english},
  abstract = {“The evidence Z is often the single most important number in the [Bayesian] problem and I think every effort should be devoted to calculating it” (MacKay 2003)[1]. Nested sampling does this by giving a direct estimate of the density of states. Posterior samples are an optional byproduct.},
  file = {/home/jacopo/Zotero/storage/3WGY3HK7/Skilling - 2004 - Nested Sampling.pdf},
}
@article{skillingNestedSamplingGeneral2006,
  author = {Skilling, John},
  title = {Nested Sampling for General {{Bayesian}} Computation},
  date = {2006-12},
  journaltitle = {Bayesian Analysis},
  volume = {1},
  number = {4},
  pages = {833--859},
  publisher = {International Society for Bayesian Analysis},
  doi = {10.1214/06-BA127},
  url = {https://projecteuclid.org/journals/bayesian-analysis/volume-1/issue-4/Nested-sampling-for-general-Bayesian-computation/10.1214/06-BA127.full},
  urldate = {2021-09-15},
  issn = {1936-0975, 1931-6690},
  abstract = {Nested sampling estimates directly how the likelihood function relates to prior mass. The evidence (alternatively the marginal likelihood, marginal density of the data, or the prior predictive) is immediately obtained by summation. It is the prime result of the computation, and is accompanied by an estimate of numerical uncertainty. Samples from the posterior distribution are an optional by-product, obtainable for any temperature. The method relies on sampling within a hard constraint on likelihood value, as opposed to the softened likelihood of annealing methods. Progress depends only on the shape of the "nested" contours of likelihood, and not on the likelihood values. This invariance (over monotonic re-labelling) allows the method to deal with a class of phase-change problems which effectively defeat thermal annealing.},
  keywords = {algorithm,annealing,Bayesian computation,evidence,marginal likelihood,Model selection,nest,phase change},
  file = {/home/jacopo/Zotero/storage/B9ADWEC4/Skilling_2006_Nested sampling for general Bayesian computation.pdf;/home/jacopo/Zotero/storage/6BEH25TG/06-BA127.html},
}
@article{speagleDYNESTYDynamicNested2020,
  author = {Speagle, Joshua S.},
  title = {{{DYNESTY}}: A Dynamic Nested Sampling Package for Estimating {{Bayesian}} Posteriors and Evidences},
  shorttitle = {{{DYNESTY}}},
  date = {2020-04-01},
  journaltitle = {Monthly Notices of the Royal Astronomical Society},
  volume = {493},
  pages = {3132--3158},
  publisher = {OUP},
  doi = {10.1093/mnras/staa278},
  url = {https://ui.adsabs.harvard.edu/abs/2020MNRAS.493.3132S},
  urldate = {2024-11-22},
  issn = {0035-8711},
  abstract = {We present DYNESTY, a public, open-source, PYTHON package to estimate Bayesian posteriors and evidences (marginal likelihoods) using the dynamic nested sampling methods developed by Higson et al. By adaptively allocating samples based on posterior structure, dynamic nested sampling has the benefits of Markov chain Monte Carlo (MCMC) algorithms that focus exclusively on posterior estimation while retaining nested sampling's ability to estimate evidences and sample from complex, multimodal distributions. We provide an overview of nested sampling, its extension to dynamic nested sampling, the algorithmic challenges involved, and the various approaches taken to solve them in this and previous work. We then examine DYNESTY's performance on a variety of toy problems along with several astronomical applications. We find in particular problems DYNESTY can provide substantial improvements in sampling efficiency compared to popular MCMC approaches in the astronomical literature. More detailed statistical results related to nested sampling are also included in the appendix.},
  keywords = {Astrophysics - Instrumentation and Methods for Astrophysics,methods: data analysis,methods: statistical,Statistics - Computation},
  annotation = {ADS Bibcode: 2020MNRAS.493.3132S},
  file = {/home/jacopo/Zotero/storage/FGFU7D67/Speagle - 2020 - DYNESTY a dynamic nested sampling package for est.pdf},
}