 import logging
 import numpy as np
 import scipy.optimize
+import iminuit
 from typing import Optional, Dict, Any, List

 from skyllh.core.parameters import FitParameterSet

@@ -663,6 +664,184 @@ def is_repeatable(self, status):
         return False


+class MinuitMinimizerImpl(MinimizerImpl):
+    """The MinuitMinimizerImpl class provides the minimizer implementation
+    for the Minuit minimizer of the iminuit package.
+    """
+    def __init__(self, maxls=100):
+        """Creates a new minimizer instance.
+
+        Parameters
+        ----------
+        maxls : int
+            The maximum number of line search steps for an iteration. Note:
+            this option is currently not used by this Minuit minimizer
+            implementation.
+        """
+        super(MinuitMinimizerImpl, self).__init__()
+
+        self._maxls = maxls
+        self._minuit = iminuit.Minuit
+
+    def minimize(self, initials, bounds, func, func_args=None, **kwargs):
+        """Minimizes the given function ``func`` with the given initial
+        function argument values ``initials``.
+
+        Parameters
+        ----------
+        initials : 1D numpy ndarray
+            The ndarray holding the initial values of all the fit parameters.
+        bounds : 2D (N_fitparams,2)-shaped numpy ndarray
+            The ndarray holding the boundary values (vmin, vmax) of the fit
+            parameters.
+        func : callable
+            The function that should get minimized.
+            The call signature must be
+
+                ``__call__(x, *args)``
+
+            The return value of ``func`` must be the tuple (f, grads), i.e.
+            the function value at the function arguments ``x`` and the
+            ndarray holding the values of the function gradient for each fit
+            parameter. This minimizer implementation makes use of the
+            gradients, hence ``func`` must always provide them.
+        func_args : sequence | None
+            Optional sequence of arguments for ``func``.
+
+        Additional Keyword Arguments
+        ----------------------------
+        Additional keyword arguments include options for this minimizer
+        implementation.
+
+        Returns
+        -------
+        xmin : 1D ndarray
+            The array containing the function arguments at the function's
+            minimum.
+        fmin : float
+            The function value at its minimum.
+        status : dict
+            The status dictionary with information about the minimization
+            process. The following information is provided:
+
+                nit : int
+                    The number of iterations needed to find the minimum.
+                warnflag : int
+                    The warning flag indicating if the minimization did
+                    converge. The possible values are:
+
+                        0: The minimization converged.
+                        1: Minuit did not end up with a valid set of fit
+                           parameter values.
+        """
+        if(func_args is None):
+            func_args = tuple()
+        if(kwargs is None):
+            kwargs = {}
+
+        # The maxls option is stored in the keyword arguments, but it is not
+        # forwarded to Minuit.
+        if('maxls' not in kwargs):
+            kwargs['maxls'] = self._maxls
+
+        # Wrap ``func`` into separate callables for the function value and
+        # the function gradients, as expected by iminuit.
+        def func_val(*args):
+            return func(np.array(args), *func_args)[0]
+
+        def func_grads(*args):
+            return func(np.array(args), *func_args)[1]
+
+        # Set the initial values and the bounds of the fit parameters.
+        p_names = ['p{}'.format(_) for _ in range(len(initials))]
+        fit_arg = dict(zip(p_names, initials))
+
+        p_bound_names = ['limit_p{}'.format(_) for _ in range(len(initials))]
+        p_bounds = [bounds[i] for i in range(len(initials))]
+        fit_arg.update(dict(zip(p_bound_names, p_bounds)))
+
+        # Create the Minuit instance using the keyword argument interface of
+        # iminuit version 1.
+        fit_min = self._minuit(
+            fcn=func_val,
+            grad=func_grads,
+            forced_parameters=p_names,
+            pedantic=False,
+            print_level=0,
+            **fit_arg)
+
+        # Run migrad and repeat it until a valid set of fit parameter values
+        # is found, but at most 25 times.
+        r = fit_min.migrad(resume=False)
+        ncalls = fit_min.ncalls
+        for _ in range(25):
+            if r[0]['has_valid_parameters']:
+                break
+            r = fit_min.migrad(resume=True)
+            ncalls += fit_min.ncalls
+
+        xmin = np.array(fit_min.args)
+        fmin = fit_min.fval
+
+        status = dict()
+        status['warnflag'] = 0 if r[0]['has_valid_parameters'] else 1
+        status['nit'] = ncalls
+
+        return (xmin, fmin, status)
+
+    def get_niter(self, status):
+        """Returns the number of iterations needed to find the minimum.
+
+        Parameters
+        ----------
+        status : dict
+            The dictionary with the status information about the minimization
+            process.
+
+        Returns
+        -------
+        niter : int
+            The number of iterations needed to find the minimum.
+        """
+        return status['nit']
+
+    def has_converged(self, status):
+        """Analyzes the status information dictionary if the minimization
+        process has converged. By definition the minimization process has
+        converged if ``status['warnflag']`` equals 0.
+
+        Parameters
+        ----------
+        status : dict
+            The dictionary with the status information about the minimization
+            process.
+
+        Returns
+        -------
+        converged : bool
+            The flag if the minimization has converged (True), or not (False).
+        """
+        if(status['warnflag'] == 0):
+            return True
+        return False
+
+    def is_repeatable(self, status):
+        """Checks if the minimization process can be repeated to get a better
+        result. For this Minuit minimizer implementation the status dictionary
+        provides no information that a repetition would improve the result,
+        hence this method always returns ``False``.
+
+        Parameters
+        ----------
+        status : dict
+            The dictionary with the status information about the last
+            minimization process.
+
+        Returns
+        -------
+        repeatable : bool
+            The flag if the minimization process can be repeated to obtain a
+            better minimum.
+        """
+        return False
+
+
 class NRNsScan2dMinimizerImpl(NR1dNsMinimizerImpl):
     """The NRNsScan2dMinimizerImpl class provides a minimizer implementation for
     the R2->R1 function where the first dimension is minimized using the
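For illustration (not part of the diff above): the new `minimize` method expects an objective function that returns both the function value and its gradients. A minimal sketch of such a function, using a hypothetical quadratic toy objective named `toy_objective` with an assumed `offsets` argument:

```python
import numpy as np

def toy_objective(x, offsets):
    """A toy objective function compatible with MinuitMinimizerImpl.minimize.

    It returns the tuple (f, grads): the sum of squared distances of the fit
    parameter values ``x`` to the given offsets, and the gradient value for
    each fit parameter.
    """
    x = np.asarray(x, dtype=np.float64)
    f = np.sum((x - offsets)**2)
    grads = 2*(x - offsets)
    return (f, grads)
```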
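A rough usage sketch of the added class, assuming the toy objective above, that the class lives in `skyllh.core.minimizer` alongside the other minimizer implementations, and that an iminuit version matching the keyword interface used in the diff (1.x) is installed:

```python
import numpy as np
from skyllh.core.minimizer import MinuitMinimizerImpl

# Hypothetical setup: two fit parameters with initial values and
# (vmin, vmax) bounds, matching the documented minimize() interface.
initials = np.array([0.5, 2.0])
bounds = np.array([[0., 10.],
                   [0., 10.]])
offsets = np.array([1., 3.])

minimizer = MinuitMinimizerImpl()
(xmin, fmin, status) = minimizer.minimize(
    initials, bounds, toy_objective, func_args=(offsets,))

print('xmin:', xmin, 'fmin:', fmin)
print('converged:', minimizer.has_converged(status))
print('number of function calls:', minimizer.get_niter(status))
```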