Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

DM-41209: Switch to pyproject.toml #5

Merged
merged 7 commits into from
Nov 21, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
14 changes: 8 additions & 6 deletions pipelines/coaddDiffMatchedQualityExtended.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -24,19 +24,19 @@ tasks:

atools.matchedRefMagDiff: MatchedRefCoaddDiffMagTool
atools.matchedRefMagDiff.fluxes_default.cmodel_err: model_err
atools.matchedRefMagDiff.produce.xLims: lims_mag_x
atools.matchedRefMagDiff.produce.yLims: lims_mag_diff
atools.matchedRefMagDiff.produce.plot.xLims: lims_mag_x
atools.matchedRefMagDiff.produce.plot.yLims: lims_mag_diff

atools.matchedRefMagChi: MatchedRefCoaddDiffMagTool
atools.matchedRefMagChi.fluxes_default.cmodel_err: model_err
atools.matchedRefMagChi.produce.xLims: lims_mag_x
atools.matchedRefMagChi.produce.yLims: lims_mag_chi
atools.matchedRefMagChi.produce.plot.xLims: lims_mag_x
atools.matchedRefMagChi.produce.plot.yLims: lims_mag_chi
atools.matchedRefMagChi.compute_chi: true

atools.matchedRefPositionRaDiff: MatchedRefCoaddDiffPositionTool
atools.matchedRefPositionRaDiff.fluxes_default.cmodel_err: model_err
atools.matchedRefPositionRaDiff.coord_label: R.A.
atools.matchedRefPositionRaDiff.coord_meas: mpf_psexpdev_cen_ra
atools.matchedRefPositionRaDiff.coord_meas: coord_ra
atools.matchedRefPositionRaDiff.coord_ref: refcat_ra
atools.matchedRefPositionRaDiff.scale_factor: 3600000
atools.matchedRefPositionRaDiff.produce.plot.xLims: lims_mag_x
Expand All @@ -45,7 +45,7 @@ tasks:
atools.matchedRefPositionDecDiff: MatchedRefCoaddDiffPositionTool
atools.matchedRefPositionDecDiff.fluxes_default.cmodel_err: model_err
atools.matchedRefPositionDecDiff.coord_label: Dec.
atools.matchedRefPositionDecDiff.coord_meas: mpf_psexpdev_cen_dec
atools.matchedRefPositionDecDiff.coord_meas: coord_dec
atools.matchedRefPositionDecDiff.coord_ref: refcat_dec
atools.matchedRefPositionDecDiff.scale_factor: 3600000
atools.matchedRefPositionDecDiff.produce.plot.xLims: lims_mag_x
Expand All @@ -63,6 +63,8 @@ tasks:
key_flux = f"mpf_{parameters.name_model}_{{band}}_flux",
key_flux_error = "{key_flux}_err"
)
coord_ra = f"mpf_{parameters.name_model}_cen_ra"
coord_dec = f"mpf_{parameters.name_model}_cen_dec"
lims_mag_x = (parameters.mag_x_min, parameters.mag_x_max)
lims_mag_chi = (parameters.mag_chi_min, parameters.mag_chi_max)
lims_mag_diff = (parameters.mag_diff_min, parameters.mag_diff_max)
Expand Down
10 changes: 5 additions & 5 deletions pipelines/fit.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@ tasks:
connections.cat_output: deepCoadd_psgauss_multiprofit
python: |
from lsst.meas.extensions.multiprofit.fit_coadd_multiband import MultiProFitSourceTask
from multiprofit.componentconfig import SersicConfig, SersicIndexConfig
from lsst.multiprofit.componentconfig import SersicConfig, SersicIndexConfig
config.fit_coadd_multiband.retarget(MultiProFitSourceTask)
config.fit_coadd_multiband.n_pointsources = 1
config.fit_coadd_multiband.sersics = {
Expand All @@ -29,7 +29,7 @@ tasks:
connections.cat_output: deepCoadd_psexp_multiprofit
python: |
from lsst.meas.extensions.multiprofit.fit_coadd_multiband import MultiProFitSourceTask
from multiprofit.componentconfig import SersicConfig, SersicIndexConfig
from lsst.multiprofit.componentconfig import SersicConfig, SersicIndexConfig
config.fit_coadd_multiband.retarget(MultiProFitSourceTask)
config.fit_coadd_multiband.n_pointsources = 1
config.fit_coadd_multiband.sersics = {
Expand All @@ -45,7 +45,7 @@ tasks:
connections.cat_output: deepCoadd_exp_multiprofit
python: |
from lsst.meas.extensions.multiprofit.fit_coadd_multiband import MultiProFitSourceTask
from multiprofit.componentconfig import SersicConfig, SersicIndexConfig
from lsst.multiprofit.componentconfig import SersicConfig, SersicIndexConfig
config.fit_coadd_multiband.retarget(MultiProFitSourceTask)
config.fit_coadd_multiband.sersics = {
"exp": SersicConfig(
Expand All @@ -60,7 +60,7 @@ tasks:
connections.cat_output: deepCoadd_exp_fixedcen_multiprofit
python: |
from lsst.meas.extensions.multiprofit.fit_coadd_multiband import MultiProFitSourceTask
from multiprofit.componentconfig import SersicConfig, SersicIndexConfig
from lsst.multiprofit.componentconfig import SersicConfig, SersicIndexConfig
config.fit_coadd_multiband.retarget(MultiProFitSourceTask)
config.fit_coadd_multiband.fit_cen_x = False
config.fit_coadd_multiband.fit_cen_y = False
Expand All @@ -77,7 +77,7 @@ tasks:
connections.cat_output: deepCoadd_psexpdev_multiprofit
python: |
from lsst.meas.extensions.multiprofit.fit_coadd_multiband import MultiProFitSourceTask
from multiprofit.componentconfig import SersicConfig, SersicIndexConfig
from lsst.multiprofit.componentconfig import SersicConfig, SersicIndexConfig
config.fit_coadd_multiband.retarget(MultiProFitSourceTask)
config.fit_coadd_multiband.n_pointsources = 1
config.fit_coadd_multiband.sersics = {
Expand Down
99 changes: 99 additions & 0 deletions pyproject.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,99 @@
[build-system]
requires = ["setuptools"]
build-backend = "setuptools.build_meta"

[project]
name = "lsst-meas-extensions-multiprofit"
authors = [
{name="Rubin Observatory Data Management", email="[email protected]"},
]
description = "Tasks and pipelines for running multiprofit with the LSST Butler"
readme = "README.rst"
requires-python = ">=3.10"
license = {file = "LICENSE"}
classifiers = [
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

In theory there should be the GPL license classifier here.

"Intended Audience :: Science/Research",
"License :: OSI Approved :: GNU General Public License v3 (GPLv3)",
"Operating System :: OS Independent",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
"Topic :: Scientific/Engineering :: Astronomy",
]
keywords = [
"astronomy",
"astrophysics",
"fitting",
"lsst",
"models",
"modeling",
]
dependencies = [
"astropy",
"galsim",
"gauss2d",
"gauss2dfit",
"lsst-multiprofit",
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This implies you are putting lsst-multiprofit on PyPI. Are you? I know this package itself can't go on PyPI because of the meas_base dependency.

Copy link
Collaborator Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Does it imply availability on PyPI? Once multiprofit is scons-built, it shows up on pip list with the same version as it would if pip-installed. On the other hand, gauss2d(fit) don't when eups-built because I skipped the pip install step, figuring it would be unnecessary. But perhaps none of this matters because presumably this package won't be pip-installable until all of the dependencies like meas_base are too...

Copy link
Member

@timj timj Nov 20, 2023

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

pip will read this dependency list. It will, I think, just try to see if they are installed before trying to check on PyPI, so if everything has happened beforehand it will be fine. One issue you will have is that lsst-multiprofit is the name here but EUPS thinks it's called multiprofit and we don't yet install the proper egg information to allow importlib.metadata to work out the right answer -- that's something I'd like to fix though.

Yes, none of this matters because meas_base is not going to be pip installable.

"matplotlib",
"numpy",
"pydantic",
"pytest",
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

pytest is presumably not a runtime dependency? You can add a separate optional-dependencies entry in the file and declare test dependencies in there.

[project.optional-dependencies]
test = [
    "pytest >= 3.2",
]

Copy link
Collaborator Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Is that v3.2 minimum from a feature we're using in the stack or a compatibility issue?

More generally I couldn't figure out how to set minimum versions of anything for standalone packages like multiprofit, beyond that I made a py3.8 conda env and confirmed that it wouldn't work. Is there anything else I could/should do other than follow changes to other stack packages like resources/daf_butler?

Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The 3.2 was just an example. It's likely wildly out of date. The point is to separate runtime deps from testing deps. Minimum versions are done the same way pytest <5,>=3.2 or something.

]
dynamic = ["version"]

[project.urls]
"Homepage" = "https://github.com/lsst-dm/meas_extensions_multiprofit"

[project.optional-dependencies]
test = [
"pytest",
]

[tool.setuptools.packages.find]
where = ["python"]

[tool.black]
line-length = 110
target-version = ["py311"]

[tool.isort]
profile = "black"
line_length = 110

[tool.ruff]
exclude = [
"__init__.py",
"tests/*.py",
]
ignore = [
"N802",
"N803",
"N806",
"N812",
"N815",
"N816",
"N999",
"D107",
"D105",
"D102",
"D104",
"D100",
"D200",
"D205",
"D400",
]
line-length = 110
select = [
"E", # pycodestyle
"F", # pycodestyle
"N", # pep8-naming
"W", # pycodestyle
"D", # pydocstyle
]
target-version = "py311"

[tool.ruff.pycodestyle]
max-doc-length = 79

[tool.ruff.pydocstyle]
convention = "numpy"
1 change: 1 addition & 0 deletions python/lsst/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,4 +20,5 @@
# along with this program. If not, see <https://www.gnu.org/licenses/>.

import pkgutil

__path__ = pkgutil.extend_path(__path__, __name__)
1 change: 1 addition & 0 deletions python/lsst/meas/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,4 +20,5 @@
# along with this program. If not, see <https://www.gnu.org/licenses/>.

import pkgutil

__path__ = pkgutil.extend_path(__path__, __name__)
1 change: 1 addition & 0 deletions python/lsst/meas/extensions/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,4 +20,5 @@
# along with this program. If not, see <https://www.gnu.org/licenses/>.

import pkgutil

__path__ = pkgutil.extend_path(__path__, __name__)
Original file line number Diff line number Diff line change
Expand Up @@ -30,27 +30,35 @@


class CatalogAction(ConfigurableAction):
"""Configurable action to return a catalog."""

def __call__(self, data, **kwargs):
return data


class MergeMultibandFluxes(CatalogAction):
"""Configurable action to merge single-band flux tables into one."""

name_model = pexConfig.Field[str](doc="The name of the model that fluxes are measured from", default="")

def __call__(self, data, **kwargs):
datasetType = kwargs.get("datasetType")
prefix_model = self.name_model + ("_" if self.name_model else "")
if self.name_model and hasattr(data, "meta") and datasetType and (
config := data.meta.get(datasetType)):
prefix = config.get('config', {}).get("prefix_column", "")
if (
self.name_model
and hasattr(data, "meta")
and datasetType
and (config := data.meta.get(datasetType))
):
prefix = config.get("config", {}).get("prefix_column", "")
else:
prefix = ""
columns_rest = []
columns_flux_band = defaultdict(list)
for column in data.columns:
if not prefix or column.startswith(prefix):
if column.endswith('_flux'):
band = column.split('_')[-2]
if column.endswith("_flux"):
band = column.split("_")[-2]
columns_flux_band[band].append(column)
else:
columns_rest.append(column)
Expand All @@ -60,18 +68,17 @@ def __call__(self, data, **kwargs):
flux = np.nansum([data[column] for column in columns_band], axis=0)
data[column_flux] = flux

columns_band_err = [f'{column}_err' for column in columns_band]
errors = [data[column]**2 for column in columns_band_err if column in data.columns]
columns_band_err = [f"{column}_err" for column in columns_band]
errors = [data[column] ** 2 for column in columns_band_err if column in data.columns]
if errors:
flux_err = np.sqrt(np.nansum(errors, axis=0))
flux_err[flux_err == 0] = np.nan
column_flux_err = f'{column_flux}_err'
column_flux_err = f"{column_flux}_err"
data[column_flux_err] = flux_err

if prefix_model:
colnames = [
col if (col in columns_rest)
else f"{prefix}{prefix_model}{col.split(prefix, 1)[1]}"
col if (col in columns_rest) else f"{prefix}{prefix_model}{col.split(prefix, 1)[1]}"
for col in data.columns
]
if hasattr(data, "rename_columns"):
Expand All @@ -83,20 +90,25 @@ def __call__(self, data, **kwargs):


class InputConfig(pexConfig.Config):
"""Config for inputs to ConsolidateAstropyTableTask"""

doc = pexConfig.Field[str](doc="Doc for connection", optional=False)
action = ConfigurableActionField[CatalogAction](
doc="Action to modify the input table",
default=None,
)
columns = pexConfig.ListField[str](doc="Column names to copy; default of None copies all",
optional=True, default=None)
columns = pexConfig.ListField[str](
doc="Column names to copy; default of None copies all", optional=True, default=None
)
column_id = pexConfig.Field[str](doc="ID column to merge", optional=False, default="objectId")
is_multiband = pexConfig.Field[bool](doc="Whether the dataset is multiband or not", default=False)
is_multipatch = pexConfig.Field[bool](doc="Whether the dataset is multipatch or not", default=False)
storageClass = pexConfig.Field[str](doc="Storage class for DatasetType", default="ArrowAstropy")


class ConsolidateAstropyTableConfigBase(pexConfig.Config):
"""Config for ConsolidateAstropyTableTask"""

inputs = pexConfig.ConfigDictField(
doc="Mapping of input dataset type config by name",
keytype=str,
Expand All @@ -105,8 +117,9 @@ class ConsolidateAstropyTableConfigBase(pexConfig.Config):
)


class ConsolidateAstropyTableConnections(pipeBase.PipelineTaskConnections,
dimensions=("tract", "skymap")):
class ConsolidateAstropyTableConnections(pipeBase.PipelineTaskConnections, dimensions=("tract", "skymap")):
"""Connections for ConsolidateAstropyTableTask"""

cat_output = connectionTypes.Output(
doc="Per-tract horizontal concatenation of the input AstropyTables",
name="objectAstropyTable_tract",
Expand All @@ -130,19 +143,23 @@ def __init__(self, *, config: ConsolidateAstropyTableConfigBase):
deferLoad=config_input.columns is not None,
)
if hasattr(self, name):
raise ValueError(f"{config_input=} {name=} is invalid, due to being an existing attribute"
f" of {self=}")
raise ValueError(
f"{config_input=} {name=} is invalid, due to being an existing attribute" f" of {self=}"
)
setattr(self, name, connection)


class ConsolidateAstropyTableConfig(pipeBase.PipelineTaskConfig, ConsolidateAstropyTableConfigBase,
pipelineConnections=ConsolidateAstropyTableConnections):
pass
class ConsolidateAstropyTableConfig(
pipeBase.PipelineTaskConfig,
ConsolidateAstropyTableConfigBase,
pipelineConnections=ConsolidateAstropyTableConnections,
):
"""PipelineTaskConfig for ConsolidateAstropyTableTask"""


class ConsolidateAstropyTableTask(pipeBase.PipelineTask):
"""Write patch-merged astropy tables to a tract-level astropy table.
"""
"""Write patch-merged astropy tables to a tract-level astropy table."""

_DefaultName = "consolidateAstropyTable"
ConfigClass = ConsolidateAstropyTableConfig

Expand All @@ -169,7 +186,7 @@ def runQuantum(self, butlerQC, inputRefs, outputRefs):

if inputConfig.columns is not None:
columns = inputConfig.columns
data_in = data_in.get(parameters={'columns': columns})
data_in = data_in.get(parameters={"columns": columns})
else:
columns = tuple(data_in.columns)

Expand All @@ -180,7 +197,7 @@ def runQuantum(self, butlerQC, inputRefs, outputRefs):

if not inputConfig.is_multiband:
columns_new = [
column if column == inputConfig.column_id else f'{band}_{column}'
column if column == inputConfig.column_id else f"{band}_{column}"
for column in columns
]
data_in.rename_columns(columns, columns_new)
Expand Down Expand Up @@ -236,10 +253,13 @@ def runQuantum(self, butlerQC, inputRefs, outputRefs):

for name, data_name in data.items():
config_input = self.config.inputs[name]
tables = [apTab.hstack([data_name[patch][band] for band in bands_sorted], join_type='exact')
if not config_input.is_multiband else data_name[patch][band_null]
for patch in (patches_ref if not config_input.is_multipatch else patches_null)]
data[name] = tables[0] if (len(tables) == 1) else apTab.vstack(tables, join_type='exact')
tables = [
apTab.hstack([data_name[patch][band] for band in bands_sorted], join_type="exact")
if not config_input.is_multiband
else data_name[patch][band_null]
for patch in (patches_ref if not config_input.is_multipatch else patches_null)
]
data[name] = tables[0] if (len(tables) == 1) else apTab.vstack(tables, join_type="exact")
table = apTab.hstack([data[name] for name in self.config.inputs])

butlerQC.put(pipeBase.Struct(cat_output=table), outputRefs)
Loading