diff --git a/CHANGELOG.md b/CHANGELOG.md
index 1871b715..4768ba07 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -5,15 +5,15 @@ All changes important to the user will be documented in this file.
 The format is based on [Keep a Changelog](http://keepachangelog.com/)
 and this project adheres to [Semantic Versioning](http://semver.org/)
 
-## [Unreleased]
+## [3.2.0] - 2021-06-01
 ### Added
 - Vectors now support iPython and Jupyter Pretty printing
 ### Changed
 - Vector sanitization function has improved handling of non-array inputs
-### Removed
 ### Fixed
 - FourVectors variable order is now in the correct order
 - Vectors now work with inputs that aren't arrays
+- Patched issue with GAMP failing to write to file
 
 ## [3.1.0] - 2020-10-2
 ### Added
@@ -189,7 +189,8 @@ and this project adheres to [Semantic Versioning](http://semver.org/)
 - PySim plugin
 - Packaging
 
-[Unreleased]: https://github.com/JeffersonLab/PyPWA/compare/v3.1.0...development
+[Unreleased]: https://github.com/JeffersonLab/PyPWA/compare/v3.2.0...main
+[3.2.0]: https://github.com/JeffersonLab/PyPWA/compare/v3.1.0...v3.2.0
 [3.1.0]: https://github.com/JeffersonLab/PyPWA/compare/v3.0.0...v3.1.0
 [3.0.0]: https://github.com/JeffersonLab/PyPWA/compare/v3.0.0a1...v3.0.0
 [3.0.0a1]: https://github.com/JeffersonLab/PyPWA/compare/v2.2.1...v3.0.0a1
diff --git a/PyPWA/libs/vectors/four_vector.py b/PyPWA/libs/vectors/four_vector.py
index c2c477d1..2bf2cd9f 100644
--- a/PyPWA/libs/vectors/four_vector.py
+++ b/PyPWA/libs/vectors/four_vector.py
@@ -20,6 +20,7 @@
 from typing import List, Union, Optional as Opt
 
 import numpy as np
+import pandas
 import pandas as pd
 
 from . import _base_vector, three_vector
@@ -73,6 +74,13 @@ def __init__(
     def __repr__(self):
         return f"FourVector(e={self.e}, x={self.x}, y={self.y}, z={self.z})"
 
+    def _repr_html_(self):
+        df = pandas.DataFrame()
+        df['Θ'] = self.get_theta()
+        df['ϕ'] = self.get_phi()
+        df['Mass'] = self.get_mass()
+        return df._repr_html_()
+
     def _repr_pretty_(self, p, cycle):
         if cycle:
             p.text("FourVector(…)")
@@ -102,6 +110,12 @@ def _get_repr_data(self):
         mass = self.get_mass()
         return theta, phi, mass
 
+    def display_raw(self):
+        df = pandas.DataFrame()
+        df['e'], df['x'] = self.e, self.x
+        df['y'], df['z'] = self.y, self.z
+        print(df)
+
     def __eq__(self, vector: "FourVector") -> bool:
         return self._compare_vectors(vector)
 
diff --git a/PyPWA/libs/vectors/particle.py b/PyPWA/libs/vectors/particle.py
index cc1a0d35..3b88117a 100644
--- a/PyPWA/libs/vectors/particle.py
+++ b/PyPWA/libs/vectors/particle.py
@@ -26,10 +26,16 @@
 from typing import List, Union, Tuple, Optional as Opt
 
 import numpy as np
-import pandas
+import pandas as pd
 
 from PyPWA.libs.vectors import FourVector
 from PyPWA import info as _info
+try:
+    from IPython.display import display
+except ImportError:
+    display = print
+
+
 __credits__ = ["Mark Jones"]
 __author__ = _info.AUTHOR
 __version__ = _info.VERSION
@@ -96,10 +102,10 @@ class Particle(FourVector):
     def __init__(
             self,
             particle_id: int,
-            e: Union[int, np.ndarray, float, pandas.DataFrame],
-            x: Opt[Union[float, pandas.Series, np.ndarray]] = None,
-            y: Opt[Union[float, pandas.Series, np.ndarray]] = None,
-            z: Opt[Union[float, pandas.Series, np.ndarray]] = None
+            e: Union[int, np.ndarray, float, pd.DataFrame],
+            x: Opt[Union[float, pd.Series, np.ndarray]] = None,
+            y: Opt[Union[float, pd.Series, np.ndarray]] = None,
+            z: Opt[Union[float, pd.Series, np.ndarray]] = None
     ):
         super(Particle, self).__init__(e, x, y, z)
         self.__particle_id = particle_id
@@ -130,9 +136,27 @@ def _repr_pretty_(self, p, cycle):
                 f" Mass={mass})"
             )
 
+    def _repr_html_(self):
+        df = pd.DataFrame()
+        df['Θ'], df['ϕ'] = self.get_theta(), self.get_phi()
+        df['Mass'] = self.get_mass()
+        return (
+            f'<b>{self.__particle_id}: {self.__particle_name}</b>'
+            f'{df._repr_html_()}'
+        )
+
+    def display_raw(self):
+        df = pd.DataFrame()
+        df['e'], df['x'], df['y'], df['z'] = self.e, self.x, self.y, self.z
+
+        display(
+            f'{self.__particle_id}: {self.__particle_name}', raw=True
+        )
+        display(df)
+
     def __getitem__(
             self, item: Union[int, str, slice]
-    ) -> Union["Particle", pandas.Series]:
+    ) -> Union["Particle", pd.Series]:
         if isinstance(item, (int, slice)) or \
                 isinstance(item, np.ndarray) and item.dtype == bool:
             return Particle(
@@ -281,6 +305,23 @@ def __repr__(self) -> str:
             string += repr(particle) + ","
         return f"ParticlePool({string})"
 
+    def _repr_pretty_(self, p, cycle):
+        if cycle:
+            return 'ParticlePool(…)'
+        else:
+            for particle in self.__particle_list:
+                particle._repr_pretty_(p, cycle)
+
+    def _repr_html_(self):
+        html = ""
+        for p in self.__particle_list:
+            html += p._repr_html_()
+        return html
+
+    def display_raw(self):
+        for p in self.__particle_list:
+            p.display_raw()
+
     def __len__(self):
         return len(self.__particle_list)
 
diff --git a/PyPWA/plugins/data/gamp.py b/PyPWA/plugins/data/gamp.py
index 98ff3048..b65f3945 100644
--- a/PyPWA/plugins/data/gamp.py
+++ b/PyPWA/plugins/data/gamp.py
@@ -197,7 +197,7 @@ def write(self, data: vectors.ParticlePool):
         for p in data.iter_particles():
             self.__file_handle.write(
                 "%d %d %.20f %.20f %.20f %.20f\n" % (
-                    p.id, p.charge, p.x, p.y, p.z, p.e
+                    p.id, p.charge, p.x[0], p.y[0], p.z[0], p.e[0]
                 )
             )
 
diff --git a/setup.py b/setup.py
index 54e85c01..25b21fe3 100644
--- a/setup.py
+++ b/setup.py
@@ -16,19 +16,7 @@
 # You should have received a copy of the GNU General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 
-import os
 import setuptools
-from distutils.extension import Extension
-from pathlib import Path
-
-import numpy as np
-
-try:
-    from Cython.Distutils import build_ext
-except ImportError:
-    print("Install Cython before building!")
-    raise
-
 
 __author__ = "PyPWA Team and Contributors"
 __license__ = "GPLv3"
@@ -37,75 +25,60 @@
 __status__ = "development"
 
 
-"""
-Why are these entry points disable?
-
-So PyPWA 2.0 had support for running scripts directly from the local
-directory, and these entry points were that functionality. With PyPWA 3
-though we dumped these scripts and instead moved almost entirely to
-Jupyter notebooks since they offer a solid amount of flexibility for
-physicists without the complicated overhead of importing Python scripts
-into an already running interpreter. Maybe eventually we'll fully remove
-them, bring them back if the physicists want them, or roll out other
-helping utilities like pymask if they need it.
-"""
-
-
 progs = "PyPWA.progs"
+
 entry_points = {
     "console_scripts": [
         f"pymask = {progs}.masking:start_masking",
-#        f"pybin = {progs}.binner:start_binning",
+        f"pybin = {progs}.binner:start_binning",
 #        f"pysimulate = {progs}.simulation:simulation",
 #        f"pyfit = {progs}.pyfit:start_fitting"
     ]
 }
 
-extension_kwargs = {
-    "sources": [],
-    "language": "c++",
-    "extra_compile_args": {
-        "gcc": ["-Wall", "-std=c++11", "-fPIC"],
-        "include_dirs": [np.get_include()]
-    }
+requires = [
+    "tqdm",        # Progress Bars
+    "iminuit<2",   # Default minimizer
+    "scipy",       # Needed for Nestle with multiple ellipsoids.
+    "numpy",       # Arrays and optimizations
+    "pyyaml",      # YAML Parser
+    "tabulate",    # Tables for iminuit
+    "appdirs",     # Attempts to find data locations
+    "tables",      # Stores table in a specialized table format
+    "pandas",      # A powerful statistics package that's used everywhere
+    "openpyxl",    # Provides support for XLXS, used for resonance,
+    "matplotlib",  # Adds support for plotting
+    "numexpr"      # Accelerates numpy by removing intermediate steps
+]
+
+extras = {
+    # Adds value correction to users configuration file
+    "fuzzing": ["fuzzywuzzy", "python-Levenshtein"]
 }
 
-
-setup_kargs = {
-    "name": "PyPWA",
-    "version": __version__,
-    "author": __author__,
-    "author_email": __email__,
-    "packages": setuptools.find_packages(),
-    "url": "http://pypwa.jlab.org",
-    "license": __license__,
-    "ext_module": [Extension("_lib", **extension_kwargs)],
-    "description": "General Partial Wave Analysis",
-    "test_suite": "tests",
-    "entry_points": entry_points,
-    "keywords": "PyPWA GeneralFitting Partial Wave Analysis Minimization",
-    "install_requires": [
-        "cython",      # C/C++ Optimizations
-        "tqdm",        # Progress Bars
-        "iminuit<2",   # Default minimizer
-        "scipy",       # Needed for Nestle with multiple ellipsoids.
-        "numpy",       # Arrays and optimizations
-        "pyyaml",      # YAML Parser
-        "tabulate",    # Tables for iminuit
-        "appdirs",     # Attempts to find data locations
-        "tables",      # Stores table in a specialized table format
-        "pandas",      # A powerful statistics package that's used everywhere
-        "openpyxl",    # Provides support for XLXS, used for resonance,
-        "matplotlib",  # Adds support for plotting
-        "numexpr"      # Accelerates numpy by removing intermediate steps
-    ],
-    "test_require": [
-        'pytest',
-        'pytest-runner',
-        "pytest-cov"
-    ],
-    "extras_requires": {"fuzzing": ["fuzzywuzzy", "python-Levenshtein"]},
-    "classifiers": [
+tests = [
+    'pytest',
+    'pytest-runner',
+    "pytest-cov"
+]
+
+
+setuptools.setup(
+    name="PyPWA",
+    version=__version__,
+    author=__author__,
+    author_email=__email__,
+    packages=setuptools.find_packages(),
+    url="http://pypwa.jlab.org",
+    license=__license__,
+    description="General Partial Wave Analysis",
+    test_suite="tests",
+    entry_points=entry_points,
+    keywords="PyPWA GeneralFitting Partial Wave Analysis Minimization",
+    install_requires=requires,
+    tests_require=tests,
+    extras_require=extras,
+    classifiers=[
         "Development Status :: 5 - Production/Stable",
         "Environment :: Console",
         "Intended Audience :: Science/Research",
@@ -119,63 +92,5 @@
         "Programming Language :: Python :: 3.8",
         "Topic :: Scientific/Engineering :: Mathematics",
         "Topic :: Scientific/Engineering :: Physics"
-    ],
-    "zip_safe": False
-}
-
-
-"""
-Anaconda doesn't have NVCC, but it does supply all the other
-required libraries. Also, if there are no CUDA libs on the system
-(No NVIDIA GPU) then CUDA integration will be disabled.
-"""
-
-base_dir = None
-for directory in os.environ["PATH"].split(os.pathsep):
-    if Path(directory + "/nvcc").exists():
-        nvcc_location = Path(directory + "/nvcc")
-        base_dir = Path(os.environ["CONDA_PREFIX"])
-        break
-
-if base_dir and base_dir.exists() and False:
-    print("Compiling with CUDA support.")
-
-    class custom_build_extension(build_ext):
-
-        def build_extensions(self):
-            self.src_extensions.append(".cu")
-            default_compiler_so = self.compiler_so
-            comp = self._compile
-
-            def _compile(obj, src, ext, cc_args, extra_postargs, pp_opts):
-                if os.path.splitext(src)[1] == '.cu':
-                    self.set_executable('compiler_so', str(nvcc_location))
-                    postargs = extra_postargs["nvcc"]
-                else:
-                    postargs = extra_postargs["gcc"]
-
-                comp(obj, src, ext, cc_args, postargs, pp_opts)
-                self.compile_so = default_compiler_so
-
-            self._compile = _compile
-            build_ext.build_extensions(self)
-
-    extension_kwargs["sources"] = []
-    extension_kwargs["libraries"] = ["cudart"]
-    extension_kwargs["runtime_library_dirs"] = base_dir / "lib64"
-    extension_kwargs["extra_compile_args"]["nvcc"] = [
-        "-fPIC", "--ccbin gcc", "-shared", "-arch=sm_61",
-        "-gencode=arch=compute_61,code=sm_61",
-        "-gencode=arch=compute_70,code=sm_70",
-        "-gencode=arch=compute_70,code=compute_70",
-        "-gencode=arch=compute_75,code=sm_75",
-        "-gencode=arch=compute_75,code=compute_75"
-    ]
-    extension_kwargs["extra_compile_args"]["include_dirs"].append(
-        base_dir / "include"
-    )
-    setup_kargs["ext_module"].append(Extension("_cuda", **extension_kwargs))
-    setup_kargs["cmdclass"] = {"build_ext": custom_build_extension}
-
-
-setuptools.setup(**setup_kargs)
+    ]
+)
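For reference, a minimal usage sketch of the display hooks this patch adds (not part of the patch itself): it assumes a PyPWA install with this change applied, and the four-momentum values are made up for illustration.

import numpy as np

from PyPWA.libs.vectors.four_vector import FourVector

# Illustrative single-event four-momentum; per the patched signatures,
# array-like inputs are accepted for e, x, y, and z.
vector = FourVector(
    np.array([1.5]),  # e
    np.array([0.1]),  # x
    np.array([0.2]),  # y
    np.array([0.3]),  # z
)

# In a Jupyter cell, evaluating `vector` renders a table of Θ, ϕ, and mass
# through the new _repr_html_ hook; in a plain terminal, display_raw()
# prints the raw e/x/y/z columns as a pandas DataFrame instead.
vector.display_raw()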