I 1410 negbinom log priors #1415

Closed
wants to merge 8 commits into from
Changes from 7 commits
6 changes: 6 additions & 0 deletions docs/source/log_priors.rst
@@ -15,6 +15,7 @@ Example::
Overview:

- :class:`BetaLogPrior`
- :class:`BinomialLogPrior`
- :class:`CauchyLogPrior`
- :class:`ComposedLogPrior`
- :class:`ExponentialLogPrior`
@@ -24,6 +25,7 @@ Overview:
- :class:`InverseGammaLogPrior`
- :class:`LogNormalLogPrior`
- :class:`MultivariateGaussianLogPrior`
- :class:`NegBinomialLogPrior`
- :class:`NormalLogPrior`
- :class:`StudentTLogPrior`
- :class:`TruncatedGaussianLogPrior`
@@ -32,6 +34,8 @@ Overview:

.. autoclass:: BetaLogPrior

.. autoclass:: BinomialLogPrior

.. autoclass:: CauchyLogPrior

.. autoclass:: ComposedLogPrior
@@ -50,6 +54,8 @@ Overview:

.. autoclass:: MultivariateGaussianLogPrior

.. autoclass:: NegBinomialLogPrior

.. autoclass:: NormalLogPrior

.. autoclass:: StudentTLogPrior
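The docs hunks above register the new BinomialLogPrior and NegBinomialLogPrior classes. As a rough illustration of what a negative binomial log prior evaluates, here is a sketch using scipy; the function name and parameters (n_successes, prob) are placeholders for this illustration, not the constructor signature introduced by this PR:

```python
# Illustrative sketch only: the parameter names (n_successes, prob) are
# assumptions, not the API added in this pull request.
import numpy as np
from scipy.stats import nbinom


def neg_binomial_log_prior(x, n_successes, prob):
    """Log prior mass of a negative binomial evaluated at integer x."""
    x = np.asarray(x)
    if np.any(x < 0) or np.any(x != np.floor(x)):
        return -np.inf  # support is the non-negative integers
    return float(np.sum(nbinom.logpmf(x, n_successes, prob)))


print(neg_binomial_log_prior(3, n_successes=5, prob=0.5))
```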
91 changes: 48 additions & 43 deletions pints/__init__.py
@@ -36,9 +36,11 @@ def _load_version_int():
except Exception as e: # pragma: no cover
raise RuntimeError('Unable to read version number (' + str(e) + ').')


Member:
Did these changes get introduced when merging? Might be a good idea to remove them all again!

Contributor Author:
Probably, I didn't modify __init__.py with anything like that.

Contributor Author:
For some reason I cannot pull master into my own repo. Does that happen to you too?

Member:
Hmmm, no! Do you have the right permissions to work on a branch in the PINTS repo directly? That might be easier.

Member:
I've sent you an invitation!

Member:
Having said that, now that we've started this PR, maybe keep it on a fork :D
I think you might need to do something like:

  • Switch to your local version of master: git checkout master
  • Make sure you have the pints-team/pints repo as a "remote". Check with git remote -v, and if there's no sign of it you can add a remote with something like git remote add pints https://github.com/pints-team/pints.git (change "pints" to any name you like)
  • Pull in master from the remote: git pull pints master

__version_int__ = _load_version_int()
__version__ = '.'.join([str(x) for x in __version_int__])


#
# Expose pints version
#
@@ -64,21 +66,21 @@ def version(formatted=False):
#
# Core classes
#
from ._core import ForwardModel, ForwardModelS1
from ._core import TunableMethod
from ._core import SingleOutputProblem, MultiOutputProblem
from ._core import ForwardModel, ForwardModelS1 # noqa
from ._core import TunableMethod # noqa
from ._core import SingleOutputProblem, MultiOutputProblem # noqa

#
# Utility classes and methods
#
from ._util import strfloat, vector, matrix2d
from ._util import Timer
from ._logger import Logger, Loggable
from ._util import strfloat, vector, matrix2d # noqa
from ._util import Timer # noqa
from ._logger import Logger, Loggable # noqa

#
# Logs of probability density functions (not necessarily normalised)
#
from ._log_pdfs import (
from ._log_pdfs import ( # noqa
LogPDF,
LogPrior,
LogPosterior,
@@ -90,8 +92,9 @@ def version(formatted=False):
#
# Log-priors
#
from ._log_priors import (
from ._log_priors import ( # noqa
BetaLogPrior,
BinomialLogPrior,
CauchyLogPrior,
ComposedLogPrior,
ExponentialLogPrior,
@@ -101,6 +104,7 @@ def version(formatted=False):
InverseGammaLogPrior,
LogNormalLogPrior,
MultivariateGaussianLogPrior,
NegBinomialLogPrior,
NormalLogPrior,
StudentTLogPrior,
TruncatedGaussianLogPrior,
@@ -110,7 +114,7 @@ def version(formatted=False):
#
# Log-likelihoods
#
from ._log_likelihoods import (
from ._log_likelihoods import ( # noqa
AR1LogLikelihood,
ARMA11LogLikelihood,
CauchyLogLikelihood,
@@ -129,7 +133,7 @@ def version(formatted=False):
#
# Boundaries
#
from ._boundaries import (
from ._boundaries import ( # noqa
Boundaries,
LogPDFBoundaries,
RectangularBoundaries,
@@ -138,7 +142,7 @@ def version(formatted=False):
#
# Error measures
#
from ._error_measures import (
from ._error_measures import ( # noqa
ErrorMeasure,
MeanSquaredError,
NormalisedRootMeanSquaredError,
@@ -152,7 +156,7 @@ def version(formatted=False):
#
# Parallel function evaluation
#
from ._evaluation import (
from ._evaluation import ( # noqa
evaluate,
Evaluator,
ParallelEvaluator,
@@ -164,7 +168,7 @@ def version(formatted=False):
#
# Optimisation
#
from ._optimisers import (
from ._optimisers import ( # noqa
curve_fit,
fmin,
Optimisation,
@@ -173,6 +177,7 @@ def version(formatted=False):
Optimiser,
PopulationBasedOptimiser,
)

from ._optimisers._adam import Adam
from ._optimisers._cmaes import CMAES
from ._optimisers._cmaes_bare import BareCMAES
@@ -187,7 +192,7 @@ def version(formatted=False):
#
# Diagnostics
#
from ._diagnostics import (
from ._diagnostics import ( # noqa
effective_sample_size,
rhat,
rhat_all_params,
@@ -197,7 +202,7 @@ def version(formatted=False):
#
# MCMC
#
from ._mcmc import (
from ._mcmc import ( # noqa
mcmc_sample,
MCMCController,
MCMCSampler,
@@ -206,38 +211,38 @@ def version(formatted=False):
SingleChainMCMC,
)
# base classes first
from ._mcmc._adaptive_covariance import AdaptiveCovarianceMC
from ._mcmc._adaptive_covariance import AdaptiveCovarianceMC # noqa

# methods
from ._mcmc._differential_evolution import DifferentialEvolutionMCMC
from ._mcmc._dram_ac import DramACMC
from ._mcmc._dream import DreamMCMC
from ._mcmc._dual_averaging import DualAveragingAdaption
from ._mcmc._emcee_hammer import EmceeHammerMCMC
from ._mcmc._haario_ac import HaarioACMC
from ._mcmc._haario_bardenet_ac import HaarioBardenetACMC
from ._mcmc._haario_bardenet_ac import AdaptiveCovarianceMCMC
from ._mcmc._hamiltonian import HamiltonianMCMC
from ._mcmc._mala import MALAMCMC
from ._mcmc._metropolis import MetropolisRandomWalkMCMC
from ._mcmc._monomial_gamma_hamiltonian import MonomialGammaHamiltonianMCMC
from ._mcmc._nuts import NoUTurnMCMC
from ._mcmc._population import PopulationMCMC
from ._mcmc._rao_blackwell_ac import RaoBlackwellACMC
from ._mcmc._relativistic import RelativisticMCMC
from ._mcmc._slice_doubling import SliceDoublingMCMC
from ._mcmc._slice_rank_shrinking import SliceRankShrinkingMCMC
from ._mcmc._slice_stepout import SliceStepoutMCMC
from ._mcmc._summary import MCMCSummary
from ._mcmc._differential_evolution import DifferentialEvolutionMCMC # noqa
from ._mcmc._dram_ac import DramACMC # noqa
from ._mcmc._dream import DreamMCMC # noqa
from ._mcmc._dual_averaging import DualAveragingAdaption # noqa
from ._mcmc._emcee_hammer import EmceeHammerMCMC # noqa
from ._mcmc._haario_ac import HaarioACMC # noqa
from ._mcmc._haario_bardenet_ac import HaarioBardenetACMC # noqa
from ._mcmc._haario_bardenet_ac import AdaptiveCovarianceMCMC # noqa
from ._mcmc._hamiltonian import HamiltonianMCMC # noqa
from ._mcmc._mala import MALAMCMC # noqa
from ._mcmc._metropolis import MetropolisRandomWalkMCMC # noqa
from ._mcmc._monomial_gamma_hamiltonian import MonomialGammaHamiltonianMCMC # noqa
from ._mcmc._nuts import NoUTurnMCMC # noqa
from ._mcmc._population import PopulationMCMC # noqa
from ._mcmc._rao_blackwell_ac import RaoBlackwellACMC # noqa
from ._mcmc._relativistic import RelativisticMCMC # noqa
from ._mcmc._slice_doubling import SliceDoublingMCMC # noqa
from ._mcmc._slice_rank_shrinking import SliceRankShrinkingMCMC # noqa
from ._mcmc._slice_stepout import SliceStepoutMCMC # noqa
from ._mcmc._summary import MCMCSummary # noqa


#
# Nested samplers
#
from ._nested import NestedSampler
from ._nested import NestedController
from ._nested._rejection import NestedRejectionSampler
from ._nested._ellipsoid import NestedEllipsoidSampler
from ._nested import NestedSampler # noqa
from ._nested import NestedController # noqa
from ._nested._rejection import NestedRejectionSampler # noqa
from ._nested._ellipsoid import NestedEllipsoidSampler # noqa


#
@@ -252,13 +257,13 @@ def version(formatted=False):
#
# Sampling initialising
#
from ._sample_initial_points import sample_initial_points
from ._sample_initial_points import sample_initial_points # noqa


#
# Transformations
#
from ._transformation import (
from ._transformation import ( # noqa
ComposedTransformation,
IdentityTransformation,
LogitTransformation,
@@ -278,7 +283,7 @@ def version(formatted=False):
#
# Noise generators (always import!)
#
from . import noise
from . import noise # noqa

#
# Remove any imported modules, so we don't expose them as part of pints
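Aside from the new prior imports, the __init__.py hunks mainly append "# noqa" to the package-level re-exports so that flake8 does not report them as unused (F401). A minimal sketch of that pattern, with hypothetical module and class names:

```python
# mypackage/__init__.py -- hypothetical package, shown only to illustrate
# the re-export pattern used in the diff above.

# Without the trailing comment, flake8 reports F401 ("imported but unused"),
# because the names are imported purely so users can write `mypackage.Thing`.
from ._thing import Thing          # noqa
from ._helpers import load, save   # noqa
```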